<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2022.847590</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Age-Related Performance in Using a Fully Immersive and Automated Virtual Reality System to Assess Cognitive Function</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Tan</surname> <given-names>Ngiap Chuan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/975551/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Lim</surname> <given-names>Jie En</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1068125/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Allen</surname> <given-names>John Carson</given-names> <suffix>Jr.</suffix></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/542680/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Wong</surname> <given-names>Wei Teen</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1435151/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Quah</surname> <given-names>Joanne Hui Min</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Muthulakshmi</surname> <given-names>Paulpandi</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Teh</surname> <given-names>Tuan Ann</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Lim</surname> <given-names>Soon Huat</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Malhotra</surname> <given-names>Rahul</given-names></name>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
<xref ref-type="aff" rid="aff8"><sup>8</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1004852/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Duke-NUS Medical School</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff2"><sup>2</sup><institution>SingHealth Polyclinics</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff3"><sup>3</sup><institution>SingHealth Duke-NUS Family Medicine Academic Clinical Programme, Duke-NUS Medical School</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff4"><sup>4</sup><institution>Centre for Quantitative Medicine, Duke-NUS Medical School</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff5"><sup>5</sup><institution>SingHealth Polyclinics-Outram, SingHealth Polyclinics</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff6"><sup>6</sup><institution>Technology Development Centre, Institute of Technical Education College West</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff7"><sup>7</sup><institution>Centre for Ageing Research and Education, Duke-NUS Medical School</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<aff id="aff8"><sup>8</sup><institution>Health Services and Systems Research, Duke-NUS Medical School</institution>, <addr-line>Singapore</addr-line>, <country>Singapore</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Mattia Siciliano, University of Campania Luigi Vanvitelli, Italy</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Edoardo Nicol&#x00F2; Aiello, University of Milano-Bicocca, Italy; Elisa Montanaro, University of Turin, Italy</p></fn>
<corresp id="c001">&#x002A;Correspondence: Ngiap Chuan Tan, <email>tan.ngiap.chuan@singhealth.com.sg</email></corresp>
<fn fn-type="other" id="fn004"><p>This article was submitted to Neuropsychology, a section of the journal Frontiers in Psychology</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>11</day>
<month>03</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>13</volume>
<elocation-id>847590</elocation-id>
<history>
<date date-type="received">
<day>13</day>
<month>01</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>16</day>
<month>02</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2022 Tan, Lim, Allen, Wong, Quah, Muthulakshmi, Teh, Lim and Malhotra.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Tan, Lim, Allen, Wong, Quah, Muthulakshmi, Teh, Lim and Malhotra</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>Cognition generally declines gradually over time due to progressive degeneration of the brain, leading to dementia and eventual loss of independent functions. The rate of regression varies among the six cognitive domains (perceptual motor, executive function, complex attention, learning and memory, social cognition and language). Current modality of cognitive assessment using neuropsychological paper-and-pencil screening tools for cognitive impairment such as the Montreal Cognitive Assessment (MoCA) has limitations and is influenced by age. Virtual reality (VR) is considered as a potential alternative tool to assess cognition. A novel, fully immersive automated VR system (Cognitive Assessment using Virtual Reality, CAVIRE) has been developed to assess the six cognitive domains. As cognition is associated with age, VR performance is postulated to vary with age using this system.</p>
</sec>
<sec>
<title>Aims</title>
<p>This is a feasibility study to evaluate the VR performance of cognitively healthy adults aged between 35 and 74 years old, based on the performance score and completion time using the CAVIRE system.</p>
</sec>
<sec>
<title>Methods</title>
<p>Conducted in a public primary care clinic in Singapore, 25 multi-ethnic Asian adults were recruited in each of the four age groups in years: (1) 35&#x2013;44; (2) 45&#x2013;54; (3) 55&#x2013;64, and (4) 65&#x2013;74. The eligibility criteria included a MoCA score of 26 or higher to reflect normal cognition and understanding English instructions. They completed common daily activities ranging from brushing teeth to shopping, across 13 VR segments. Their performance scores and completion time were automatically computed by the CAVIRE system. These VR performance indices were compared across the four age groups using one-way ANOVA, <italic>F</italic>-test of the hypothesis, followed by pair-wise comparisons in the event of a significant <italic>F</italic>-test (<italic>p</italic> &#x003C; 0.05).</p>
</sec>
<sec>
<title>Results</title>
<p>One participant dropped out from Group 1. The demographic characteristics of 99 participants were similar across the 4 age groups. Overall, younger participants in Groups 1 and 2 attained higher VR performance scores and shorter completion time, compared to those in Groups 3 and 4, in all six cognitive domains (all <italic>p</italic> &#x003C; 0.05).</p>
</sec>
<sec>
<title>Conclusion</title>
<p>The CAVIRE VR performance scores and completion time significantly differ between the younger and older Asian participants with normal cognition. Enhancements to the system are needed to establish the age-group specific normal performance indices.</p>
</sec>
</abstract>
<kwd-group>
<kwd>cognition</kwd>
<kwd>virtual reality</kwd>
<kwd>age</kwd>
<kwd>performance score</kwd>
<kwd>time</kwd>
</kwd-group>
<contract-sponsor id="cn001">Mitsui Sumitomo Insurance Welfare Foundation<named-content content-type="fundref-id">10.13039/100008734</named-content></contract-sponsor>
<contract-sponsor id="cn002">SingHealth Foundation<named-content content-type="fundref-id">10.13039/501100004327</named-content></contract-sponsor>
<contract-sponsor id="cn003">Duke-NUS Medical School<named-content content-type="fundref-id">10.13039/100016017</named-content></contract-sponsor>
<counts>
<fig-count count="3"/>
<table-count count="4"/>
<equation-count count="0"/>
<ref-count count="25"/>
<page-count count="13"/>
<word-count count="8330"/>
</counts>
</article-meta>
</front>
<body>
<sec id="S1" sec-type="intro">
<title>Introduction</title>
<p>Dementia is prevalent globally. According to the World Health Organisation, approximately 50 million people worldwide are affected by dementia in 2019, and this figure is projected to triple to 152 million by 2050 (<xref ref-type="bibr" rid="B24">World Health Organisation, 2019</xref>).</p>
<p>Being a progressive neurodegenerative disorder, dementia involves gradual cognitive decline over time, leading to eventual loss of independent functions. The decline varies across the six cognitive domains. According to the Diagnostic and Statistical Manual of Mental Disorders (DSM-5), the six domains of cognitive function include: perceptual-motor function, executive function, complex attention, social cognition, learning and memory, and language (<xref ref-type="bibr" rid="B3">American Psychiatric Association, 2013</xref>). Therefore, early identification of cognitive impairment in any domain is pivotal to initiate appropriate interventions to retard its further decline (<xref ref-type="bibr" rid="B6">Brasure et al., 2018</xref>).</p>
<p>At present, the standard cognitive assessment method involves traditional neuropsychological paper-and-pencil screening tools for cognitive impairment such as the Montreal Cognitive Assessment (MoCA) and the Mini-Mental State Examination (MMSE) (<xref ref-type="bibr" rid="B1">Abd Razak et al., 2019</xref>). Neuropsychological tests are influenced by sociodemographic variables such as age, number of years of education, gender and cultural background (<xref ref-type="bibr" rid="B2">Acevedo et al., 2007</xref>). Younger age is associated with better performance among cognitively healthy participants in various neuropsychological tests, including the MoCA (<xref ref-type="bibr" rid="B12">Larouche et al., 2016</xref>); and the MMSE (<xref ref-type="bibr" rid="B18">Piccinin et al., 2013</xref>).</p>
<p>Neuropsychological paper-and-pencil tests have been shown to be lacking in ecological validity, where the individual&#x2019;s performance on the tests does not adequately predict the real-world functioning of the individual (<xref ref-type="bibr" rid="B21">Spooner and Pachana, 2006</xref>). On the other hand, studies have shown that virtual reality has the potential for enhanced ecological validity to provide a better assessment of the individual&#x2019;s cognitive function in the real-world setting (<xref ref-type="bibr" rid="B17">Parsons, 2015</xref>).</p>
<p>In recent years, virtual reality (VR) has been deployed to assess cognition. By wearing a head-mounted device, VR enables a person to experience simulated real-life situations. VR allows the evaluation of multiple cognitive domains such as memory and executive functions. It can potentially be self-administered, requires minimal training, provides a pleasant experience and decreases the psychological distress caused by using conventional screening tools (<xref ref-type="bibr" rid="B11">Jin et al., 2020</xref>). Studies have demonstrated the promising applications of VR in cognition evaluation. <xref ref-type="bibr" rid="B22">Vallejo et al. (2017)</xref> used VR to assess cognitive processes and everyday life, including a virtual cooking scenario. <xref ref-type="bibr" rid="B8">Davison et al. (2017)</xref> developed a virtual parking simulator for cognitive assessment.</p>
<p>Age affects both cognition and VR performance. Cognition declines with age, although younger adults can be affected by Alzheimer&#x2019;s Disease and other forms of dementia. Studies have revealed that generally younger individuals have better VR performance compared to those who are older (<xref ref-type="bibr" rid="B8">Davison et al., 2017</xref>; <xref ref-type="bibr" rid="B16">Ouellet et al., 2018</xref>; <xref ref-type="bibr" rid="B19">Plechat&#x00E1; et al., 2019</xref>). <xref ref-type="bibr" rid="B20">Sakai et al. (2018)</xref> reported significant differences in VR performance scores across age bands of 20 years, and the decline of VR performance is more obvious from the fifth decade onward. However, the results of these studies are often based on their participation in a limited number of VR tasks. The study by <xref ref-type="bibr" rid="B19">Plechat&#x00E1; et al. (2019)</xref> is an example. They assessed participants&#x2019; recall of a shopping list to complete their virtual supermarket task. Overall, very little research has been done on using VR to assess cognitive function based on all six cognitive domains. Moreover, evidence is scarce on the implementation of a single VR system to assess the cognitive function of individuals across a wide span of age groups.</p>
<p>Therefore, the need to design a more comprehensive VR system is imperative. A novel VR system that is capable of giving automated audio-visual instructions while individuals perform tasks that cover all six cognitive domains has been developed for Asians living in urban setting (<xref ref-type="bibr" rid="B13">Lim et al., 2021</xref>). Known as CAVIRE (Cognitive Assessment using VIrtual REality) system, it is developed to assess the cognition of community dwelling, ambulatory, older multi-ethnic Asians living in densely populated housing estates.</p>
<p>In this paper, we have recruited participants from different age groups, ranging from 35 years old to 74 years old, and assessed their cognitive function using CAVIRE based on the six cognitive domains. By assessing the performance indicators for each of the six cognitive domains, we would like to establish the system-specific nomogram of VR performance for adults across different age groups. Such a nomogram is crucial to identify adults with abnormal cognition based on their VR performance in the context of their age group.</p>
<p>Cognitively healthy younger adults are postulated to achieve better performance scores and shorter completion time compared to cognitively healthy older people. Understanding the differential VR performance across the different age groups and cognitive domains will enhance the utility potential of CAVIRE to assess cognition in normal aging.</p>
</sec>
<sec id="S2">
<title>Aims</title>
<p>This is a feasibility study aimed to assess the performance of cognitively healthy Asian adults aged between 35 and 74 years old to complete tasks in a fully immersive and automated VR system. Their performance was evaluated using a score matrix on the correct attempts and the time taken to complete the tasks across thirteen virtual segments.</p>
</sec>
<sec id="S3" sec-type="materials|methods">
<title>Materials and Methods</title>
<p>This manuscript presents the results from a sub-analysis of a published study protocol on the CAVIRE system (<xref ref-type="bibr" rid="B13">Lim et al., 2021</xref>).</p>
<sec id="S3.SS1">
<title>The Cognitive Assessment Using Virtual Reality System</title>
<p>The system includes a pre-assessment tutorial session, thirteen virtual segments to assess cognitive function, and an automated scoring system.</p>
<p>During the tutorial session, participants put on a head-mounted display device (HMD), namely HTC VIVE Pro, and are inducted to the VR environment by using head movements and hand movements respectively. The hand movements are recorded by the Leap Motion controller. It is a small USB peripheral device mounted onto the HMD. Using two monochromatic infra-red (IR) cameras and three IR LEDs, the device observes a roughly hemispherical area, to a distance of about one meter. The LEDs generate pattern-less IR light and the cameras generate almost 200 frames per second of reflected data. This is then sent through a USB cable to the computer, where it is analyzed by the Leap Motion software using complex algorithms. The 3D position data of the hand movements is synthesized by comparing the 2D frames generated by the two cameras.</p>
<p>The Voice Recognition software utilizes the built-in microphone from a Windows PC for the voice input. The Application Programming Interface (API) is integrated within the system. It will automatically detect the participant&#x2019;s speech without recording or active speech extraction. However, for the purposes of our study, we use an external USB microphone. The Voice Recognition software is customized to detect the participant&#x2019;s correct pronunciation of multiple fruit names in English with a Graphic User Interface (GUI) feedback indication. They are integrated and run in parallel with other Unity3D Applications of the CAVIRE software.</p>
<p>Once participants indicate readiness to the researcher, they proceed to begin with the VR segments. Each segment features common day-to-day activities to assess specific domains of cognitive function. These activities are contextualized to the local Singapore setting and would be familiar to both the young and old participants. Participants perform these virtual tasks via hand gestures and head movements which are detected by motion sensors. Their speech is assessed using the Voice Recognition technology embedded in the system. The automated voice and visual instructions in English guide the participants to complete the tasks.</p>
<p>The participants perform the VR tasks across the following thirteen segments:</p>
<list list-type="simple">
<list-item>
<label>(1)</label>
<p>Brushing and rinsing teeth.</p>
</list-item>
<list-item>
<label>(2)</label>
<p>Preparing peanut butter bread for breakfast.</p>
</list-item>
<list-item>
<label>(3)</label>
<p>Identifying pictures of important persons in the newspaper.</p>
</list-item>
<list-item>
<label>(4)</label>
<p>Watching television, while listening to the weather forecast regarding impending rain on the radio.</p>
</list-item>
<list-item>
<label>(5)</label>
<p>Naming the fruits in a shopping list and remembering the fruits.</p>
</list-item>
<list-item>
<label>(6)</label>
<p>Choosing the appropriate clothing to go for grocery shopping.</p>
</list-item>
<list-item>
<label>(7)</label>
<p>Remembering to pick up the umbrella, before opening and locking the door.</p>
</list-item>
<list-item>
<label>(8)</label>
<p>Taking the lift to level 1 in an apartment block by pressing the correct buttons.</p>
</list-item>
<list-item>
<label>(9)</label>
<p>Looking to the left and right, and waiting for green pedestrian light, before crossing the street.</p>
</list-item>
<list-item>
<label>(10)</label>
<p>Remembering and choosing the stipulated stall, i.e., the one which sells fruits.</p>
</list-item>
<list-item>
<label>(11)</label>
<p>Picking the correct fruits based on recall from the shopping list.</p>
</list-item>
<list-item>
<label>(12)</label>
<p>Calculating and paying the correct sum of money for all the fruits.</p>
</list-item>
<list-item>
<label>(13)</label>
<p>Selecting the appropriate emotion, with regards to scenes of a birthday party and car accident respectively.</p>
</list-item>
</list>
<p>As shown in <xref ref-type="fig" rid="F1">Figure 1</xref>, these thirteen segments cover the six cognitive domains, as defined by the 5th edition of the Diagnostic and Statistical Manual of Mental Disorders (DSM-5) (<xref ref-type="bibr" rid="B3">American Psychiatric Association, 2013</xref>). Four segments (6, 8, 11, and 13) assess a single cognitive domain, while the tasks in the remaining nine segments evaluate two or more cognitive domains. For a balanced evaluation framework, each cognitive domain is assessed over four different segments. The system was pilot-tested internally with young and old volunteers and was further enhanced over three revisions. The maximum amount of time needed to complete the VR assessment across the thirteen segments is approximately 12 min.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>Cognitive domain assessed in each segment.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpsyg-13-847590-g001.tif"/>
</fig>
<p>The CAVIRE system features an automated scoring system to assess each task performance. The scores are computed automatically for each segment and for the entire VR assessment. The scoring matrix comprises (i) the proportion of tasks performed correctly; (ii) number of attempts needed to complete each task and (iii) completion of tasks within the stipulated time. The matrix is illustrated in &#x201C;<xref ref-type="table" rid="T4">Appendix Table A1</xref> &#x2013; Scoring algorithm of the VR assessment.&#x201D;</p>
</sec>
<sec id="S3.SS2">
<title>Study Site</title>
<p>A public primary care clinic (polyclinic) in the southern region of Singapore was determined as the study site. This polyclinic provides comprehensive primary healthcare services to an estimated population of 18,960 residents of varying Asian ethnicity in the Outram estate, of which 24.7% were aged 65 years and above in 2019 (<xref ref-type="bibr" rid="B9">Department of Statistics et al., 2019</xref>). About 80% of its patient population originated from other estates in Singapore, as it provides accessible healthcare services to employees working in the central business district in the proximity.</p>
</sec>
<sec id="S3.SS3">
<title>Study Population</title>
<p>Participants included those who sought medical consultation at the polyclinic as well as visitors and accompanying persons of patients at the polyclinic if they satisfied the eligibility criteria: (1) aged between 35 and 74 years old, and (2) understood English (the medium of instruction in CAVIRE), and (3) willing to complete the questionnaires and the CAVIRE VR assessment, and (4) MoCA score of 26 or more.</p>
<p>Individuals with any of the following were excluded: pre-existing diagnosis of cognitive impairment or dementia as documented in their electronic medical record; any disability which rendered them incapable of providing written informed consent; neurological deficits that might affect vision, hearing, speech or motor skills; or known motion sickness or epilepsy.</p>
</sec>
<sec id="S3.SS4">
<title>Sample Size</title>
<p>A total of 25 participants were recruited in each 10-year age group: (1) 35&#x2013;44 years old; (2) 45&#x2013;54 years old; (3) 55&#x2013;64 years old; (4) 65&#x2013;74 years old. As this is a feasibility study, sample size calculation is not necessary, but sample size justifications need to be provided (<xref ref-type="bibr" rid="B4">Billingham et al., 2013</xref>). The Modified Wald method was used to calculate the confidence interval of a proportion. Using a one-sided 95% confidence interval, based on a proportion of 23 out of 25 participants in a particular age group, at least 77% of participants in that age group who undertake the CAVIRE assessment in the future would be able to complete it. For the purposes of this feasibility study, a 90% completion rate, which equates to 23 out of 25 participants in each age group, would be considered adequate. Thus, 100 cognitively healthy participants, 25 in each of four different age groups, were enrolled for assessment of their VR performance.</p>
</sec>
<sec id="S3.SS5">
<title>Recruitment and Procedure During the Study Administration</title>
<p>Prior to the start of the study, the Research Assistant (RA) was trained by physicians and Advanced Practice Nurse on the recruitment procedure and the administration of the MoCA test. During the study implementation, the RA screened the eligibility of potential participants at the waiting area of the clinic, or via internal referral from study investigators. The RA explained the study protocol, obtained written informed consent, and confirmed the participants&#x2019; eligibility criteria from their electronic medical records before administering the MoCA.</p>
<p>Cognitively healthy participants, i.e., with a MoCA score of 26 or higher were enrolled. While scoring the MoCA, an additional point correction was accorded to those with &#x2264;10 years of education (<xref ref-type="bibr" rid="B14">Ng et al., 2013</xref>). Those who attained a MoCA score of less than 26 were excluded and referred for further clinical assessment at the polyclinic.</p>
<p>The Pre-VR questionnaire gathered the participants&#x2019; demographic data (age, gender, ethnicity, number of years of education) and scores from the following validated cognitive and functional status assessments: (1) Abbreviated Mental Test (AMT) and (2) Mini-Mental State Examination (MMSE).</p>
<p>Next, the participants were briefed on the VR procedure and equipment. With the help of the RA, the participants sat on a chair and put on the VR head-mounted device. The participants were then introduced to a tutorial session. This tutorial session allowed all participants, regardless of age, to familiarize themselves and feel comfortable in using their head and hand movements in the VR environment. Once the participants were ready, they proceeded to complete the 13 segments of the VR assessment. The scores and the time taken to complete each segment were automatically computed in the CAVIRE system and were aggregated for their overall VR performance.</p>
</sec>
<sec id="S3.SS6">
<title>Outcome Measurements</title>
<p>The CAVIRE performance-indices were computed according to the matrix for each cognitive domain in the four age groups: (1) 35&#x2013;44 years old; (2) 45&#x2013;54 years old; (3) 55&#x2013;64 years old; (4) 65&#x2013;74 years old. The aggregated indices comprise both the VR performance scores and completion time.</p>
</sec>
<sec id="S3.SS7">
<title>Data Management and Monitoring</title>
<p>The data from the questionnaires were transcribed into Redcap, a secure research database, and audited by a data management officer in the institution for errors. The VR data from the CAVIRE system was exported to the same database and merged with the audited questionnaire data. The anonymized combined data were handed over to data analysts in the study team. Participants who complained of headache, nausea or giddiness during the VR assessment were advised to stop the procedure and considered as dropouts.</p>
</sec>
<sec id="S3.SS8">
<title>Statistical Analysis</title>
<p>Potential confounders were compared among the four age groups. The potential confounders include gender (male, female), ethnicity (Chinese, non-Chinese), education level (up to secondary, post-secondary/tertiary), and housing (public, private) as a surrogate for socio-economic status. As no significant difference among the four age groups for any confounder was found, the VR performance indices (VR performance score and VR completion time) for the six cognitive domains were compared across the age groups. This was done using a one-way analysis of variance (ANOVA) <italic>F</italic>-test of the hypothesis H<sub>0</sub>: &#x03BC;<sub>1</sub> = &#x03BC;<sub>2</sub> = &#x03BC;<sub>3</sub> = &#x03BC;<sub>4</sub> (all means equal) vs. H<sub>1</sub>: &#x03BC;<sub><italic>i</italic></sub> &#x2260;&#x03BC;<sub><italic>j</italic></sub> for at least one <italic>i</italic> &#x2260; <italic>j</italic> (<italic>i</italic> = <italic>j</italic> = 1, 2, 3, 4) (at least two means different). For the rejection of H<sub>0</sub>, an <italic>F</italic>-test <italic>p</italic>-value of <italic>p</italic> &#x003C; 0.05 was considered statistically significant. This is followed by pair-wise comparisons among the four age groups for the VR performance score and the VR completion time respectively. In addition, a linear trend across the four age groups was tested using a contrast. Based on linearized Q-Q plots, the normality of residuals was assessed visually and found to be tenable. For this study, all statistical analyses were done by utilizing the software SAS v9.4.</p>
</sec>
</sec>
<sec id="S4" sec-type="results">
<title>Results</title>
<p>This segment of the study commenced recruitment in October 2020 and was completed by January 2021. Only one participant from the 35 to 44 years age group failed to complete the study due to apprehension during the administration of the MoCA questionnaire, constituting a dropout rate of 1%. No participant experienced any adverse effect while performing the VR tasks.</p>
<p>Demographic characteristics of the remaining 99 participants are presented in <xref ref-type="table" rid="T1">Table 1</xref>. The characteristics are similar across the four age groups, labeled as Group 1 for 35&#x2013;44 years old; Group 2 for 45&#x2013;54 years old; Group 3 for 55&#x2013;64 years old and Group 4 for 65&#x2013;74 years old.</p>
<table-wrap position="float" id="T1">
<label>TABLE 1</label>
<caption><p>Demographic characteristic frequency counts (%) and comparison among age groups.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left" colspan="2">Demographic characteristic</td>
<td valign="top" align="center">Overall (<italic>n</italic> = 99)</td>
<td valign="top" align="center" colspan="5">Age group (years) Count (%)<hr/></td>
</tr>
<tr>
<td valign="top" colspan="2"/><td valign="top" align="center"/><td valign="top" align="center">35&#x2013;44 (<italic>n</italic> = 24)</td>
<td valign="top" align="center">45&#x2013;54 (<italic>n</italic> = 25)</td>
<td valign="top" align="center">55&#x2013;64 (<italic>n</italic> = 25)</td>
<td valign="top" align="center">65&#x2013;74 (<italic>n</italic> = 25)</td>
<td valign="top" align="center"><italic>p</italic>-value</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Gender</td>
<td valign="top" align="left">Male</td>
<td valign="top" align="center">44 (44.4)</td>
<td valign="top" align="center">12 (27.3)</td>
<td valign="top" align="center">11 (25.0)</td>
<td valign="top" align="center">12 (27.3)</td>
<td valign="top" align="center">9 (20.5)</td>
<td valign="top" align="center">0.765</td>
</tr>
<tr>
<td/>
<td valign="top" align="left">Female</td>
<td valign="top" align="center">55 (55.6)</td>
<td valign="top" align="center">12 (21.8)</td>
<td valign="top" align="center">14 (25.5)</td>
<td valign="top" align="center">13 (23.6)</td>
<td valign="top" align="center">16 (29.1)</td>
<td/>
</tr>
<tr>
<td valign="top" align="left">Ethnicity</td>
<td valign="top" align="left">Chinese</td>
<td valign="top" align="center">77 (77.8)</td>
<td valign="top" align="center">19 (24.7)</td>
<td valign="top" align="center">16 (20.8)</td>
<td valign="top" align="center">21 (27.3)</td>
<td valign="top" align="center">21 (27.3)</td>
<td valign="top" align="center">0.273</td>
</tr>
<tr>
<td/>
<td valign="top" align="left">Non-Chinese</td>
<td valign="top" align="center">22 (22.2)</td>
<td valign="top" align="center">5 (22.7)</td>
<td valign="top" align="center">9 (40.9)</td>
<td valign="top" align="center">4 (18.2)</td>
<td valign="top" align="center">4 (18.2)</td>
<td/>
</tr>
<tr>
<td valign="top" align="left">Education</td>
<td valign="top" align="left">Up to Secondary</td>
<td valign="top" align="center">28 (28.3)</td>
<td valign="top" align="center">2 (7.1)</td>
<td valign="top" align="center">8 (28.6)</td>
<td valign="top" align="center">8 (28.6)</td>
<td valign="top" align="center">10 (35.7)</td>
<td valign="top" align="center">0.081</td>
</tr>
<tr>
<td/>
<td valign="top" align="left">Post-Secondary/Tertiary</td>
<td valign="top" align="center">71 (71.7)</td>
<td valign="top" align="center">22 (31.0)</td>
<td valign="top" align="center">17 (23.9)</td>
<td valign="top" align="center">17 (23.9)</td>
<td valign="top" align="center">15 (21.1)</td>
<td/>
</tr>
<tr>
<td valign="top" align="left">Socio- economic status</td>
<td valign="top" align="left">Public housing</td>
<td valign="top" align="center">66 (66.7)</td>
<td valign="top" align="center">17 (25.8)</td>
<td valign="top" align="center">16 (24.2)</td>
<td valign="top" align="center">19 (28.8)</td>
<td valign="top" align="center">14 (21.2)</td>
<td valign="top" align="center">0.470</td>
</tr>
<tr>
<td/>
<td valign="top" align="left">Private housing</td>
<td valign="top" align="center">33 (33.3)</td>
<td valign="top" align="center">7 (21.2)</td>
<td valign="top" align="center">9 (27.3)</td>
<td valign="top" align="center">6 (18.2)</td>
<td valign="top" align="center">11 (33.3)</td>
<td/>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> shows the VR performance scores in the six cognitive domains by the four age groups. The performance score of each cognitive domain was computed based on the aggregated scores of the respective segments stipulated in the matrix (<xref ref-type="fig" rid="F2">Figure 2</xref>).</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Virtual reality performance scores in each of the six cognitive domains by age group. Age Group (years): 1 = 35&#x2013;44, 2 = 45&#x2013;54, 3 = 55&#x2013;64, 4 = 65&#x2013;74.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpsyg-13-847590-g002.tif"/>
</fig>
<p><xref ref-type="table" rid="T2">Table 2</xref> summarizes the mean (SD) performance scores for each age group and the inter-age group mean score differences. Overall, younger participants achieved higher scores compared to the older participants in each cognitive domain (all <italic>p</italic> &#x003C; 0.05, <xref ref-type="table" rid="T2">Table 2</xref>). The inter-age group VR performance scores are discriminant in all six cognitive domains between Groups (1) and (4); and discriminant in all cognitive domains except for &#x201C;Learning and Memory&#x201D; between Groups (1) and (3). Performance scores show no significant difference among the older participants in Groups (3) and (4) across the domains except for &#x201C;Social Cognition.&#x201D;</p>
<table-wrap position="float" id="T2">
<label>TABLE 2</label>
<caption><p>Virtual reality mean (SD) performance score by age group with differences and pair-wise comparisons.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Cognitive domain</td>
<td valign="top" align="center" colspan="4">Age group Mean (SD) score<hr/></td>
<td valign="top" align="center"><italic>P</italic>-values: <italic>F</italic>-test (Linear trend)</td>
<td valign="top" align="center" colspan="6">Pair-wise differences between age groups 95% confidence interval on difference and <italic>p</italic>-value<hr/></td>
</tr>
<tr>
<td/>
<td valign="top" align="center">(1) 35&#x2013;44 years (<italic>n</italic> = 24)</td>
<td valign="top" align="center">(2) 45&#x2013;54 years (<italic>n</italic> = 25)</td>
<td valign="top" align="center">(3) 55&#x2013;64 years (<italic>n</italic> = 25)</td>
<td valign="top" align="center">(4) 65&#x2013;74 years (<italic>n</italic> = 25)</td>
<td valign="top" align="left"/><td valign="top" align="center">(1) vs. (2)</td>
<td valign="top" align="center">(1) vs. (3)</td>
<td valign="top" align="center">(1) vs. (4)</td>
<td valign="top" align="center">(2) vs. (3)</td>
<td valign="top" align="center">(2) vs. (4)</td>
<td valign="top" align="center">(3) vs. (4)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Perceptual Motor</td>
<td valign="top" align="center">374.0 (29.9)</td>
<td valign="top" align="center">367.0 (29.9)</td>
<td valign="top" align="center">353.0 (29.2)</td>
<td valign="top" align="center">347.0 (45.8)</td>
<td valign="top" align="center">0.0386 (0.0044)</td>
<td valign="top" align="center">6.96 (&#x2013;13.5, 27.4) 0.501</td>
<td valign="top" align="center">21.0 (0.50, 41.4) 0.045<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">27.0 (6.50, 47.4) 0.010&#x002A;&#x002A;</td>
<td valign="top" align="center">14.0 (&#x2013;6.25, 34.2) 0.173</td>
<td valign="top" align="center">20.0 (&#x2013;0.25, 40.2) 0.053</td>
<td valign="top" align="center">6.00 (&#x2013;14.2, 26.2) 0.558</td>
</tr>
<tr>
<td valign="top" align="left">Executive Function</td>
<td valign="top" align="center">377.1 (32.9)</td>
<td valign="top" align="center">346.0 (44.3)</td>
<td valign="top" align="center">314.0 (52.1)</td>
<td valign="top" align="center">315.0 (61.2)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">31.1 (3.34, 58.8) 0.029<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">63.1 (35.3, 90.8) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">62.1 (34.3, 89.8) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">32.0 (4.55, 59.5) 0.023<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">31.0 (3.55, 58.5) 0.027<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;1.00 (&#x2013;28.5, 26.5) 0.943</td>
</tr>
<tr>
<td valign="top" align="left">Complex attention</td>
<td valign="top" align="center">317.7 (46.9)</td>
<td valign="top" align="center">281.0 (44.1)</td>
<td valign="top" align="center">254.0 (55.8)</td>
<td valign="top" align="center">248.0 (66.9)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">36.7 (5.95, 67.5) 0.020<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">63.7 (33.0, 94.5) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">69.7 (39.0, 100.5) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">27.0 (&#x2013;3.44, 57.4) 0.082</td>
<td valign="top" align="center">33.0 (2.56, 63.4) 0.034<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">6.00 (&#x2013;24.4, 36.4) 0.697</td>
</tr>
<tr>
<td valign="top" align="left">Social cognition</td>
<td valign="top" align="center">351.0 (27.1)</td>
<td valign="top" align="center">313.0 (53.6)</td>
<td valign="top" align="center">312.0 (44.6)</td>
<td valign="top" align="center">284.0 (59.5)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">38.0 (10.8, 65.2) 0.007&#x002A;&#x002A;</td>
<td valign="top" align="center">39.0 (11.8, 66.2) 0.005&#x002A;&#x002A;</td>
<td valign="top" align="center">67.0 (39.8, 94.2) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">1.00 (&#x2013;25.9, 27.9) 0.941</td>
<td valign="top" align="center">29.0 (2.09, 55.9) 0.035<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">28.0 (1.09, 54.9) 0.042<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
</tr>
<tr>
<td valign="top" align="left">Learning and memory</td>
<td valign="top" align="center">342.7 (40.0)</td>
<td valign="top" align="center">328.0 (44.1)</td>
<td valign="top" align="center">316.0 (46.1)</td>
<td valign="top" align="center">303.0 (67.4)</td>
<td valign="top" align="center">0.0474 (0.0051)</td>
<td valign="top" align="center">14.7 (&#x2013;14.0, 43.4) 0.312</td>
<td valign="top" align="center">26.7 (&#x2013;2.03, 55.4) 0.068</td>
<td valign="top" align="center">39.7 (11.0, 68.4) 0.007&#x002A;&#x002A;</td>
<td valign="top" align="center">12.0 (&#x2013;16.4, 40.4) 0.404</td>
<td valign="top" align="center">25.0 (&#x2013;3.44, 53.4) 0.084</td>
<td valign="top" align="center">13.0 (&#x2013;15.4, 41.4) 0.367</td>
</tr>
<tr>
<td valign="top" align="left">Language</td>
<td valign="top" align="center">378.1 (34.0)</td>
<td valign="top" align="center">366.0 (42.6)</td>
<td valign="top" align="center">335.0 (49.0)</td>
<td valign="top" align="center">335.0 (59.5)</td>
<td valign="top" align="center">0.0020 (0.0003)</td>
<td valign="top" align="center">12.1 (&#x2013;14.7, 39.0) 0.372</td>
<td valign="top" align="center">43.1 (16.3, 70.0) 0.002&#x002A;&#x002A;</td>
<td valign="top" align="center">43.1 (16.3, 70.0) 0.002&#x002A;&#x002A;</td>
<td valign="top" align="center">31.0 (4.43, 57.6) 0.023<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">31.0 (4.43, 57.6) 0.023<xref ref-type="table-fn" rid="t2fns1">&#x002A;</xref></td>
<td valign="top" align="center">0.00 (&#x2013;26.6, 26.6) 1.000</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t2fns1"><p><italic>&#x002A;, &#x002A;&#x002A;, &#x002A;&#x002A;&#x002A; Statistically significant at p &#x003C; 0.05, p &#x003C; 0.01, p &#x003C; 0.001.</italic></p></fn>
</table-wrap-foot>
</table-wrap>
<p>For specific cognitive domains (<xref ref-type="table" rid="T2">Table 2</xref>), significant differences are noted in sequential age groups from (1) to (3) in &#x201C;Executive Function&#x201D;. In the &#x201C;Perceptual Motor&#x201D; domain, the performance scores differ between the youngest participants in Group (1) and older participants in Groups (3) and (4). In &#x201C;Complex Attention&#x201D; and &#x201C;Social Cognition&#x201D; domains, differential VR performances were recorded between younger participants in Group 1 and those in Groups (2), (3), and (4). In &#x201C;Learning and Memory,&#x201D; the only significant difference is between the extreme age groups (1) and (4). In &#x201C;Language,&#x201D; significant differences in performance scores are noted between younger and older participants [i.e., Groups (1) vs. (3), (1) vs. (4), (2) vs. (3), and (2) vs. (4)].</p>
<p><xref ref-type="fig" rid="F3">Figure 3</xref> summarizes the time to complete the VR tasks in each cognitive domain. Overall, younger participants took a shorter time to complete the VR tasks compared to those in the older age groups regardless of the cognitive domain (all <italic>p</italic> &#x003C; 0.01, <xref ref-type="table" rid="T3">Table 3</xref>).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>Time to complete the virtual reality tasks in each cognitive domain across the age groups. Age Group (years): 1 = 35&#x2013;44, 2 = 45&#x2013;54, 3 = 55&#x2013;64, 4 = 65&#x2013;74.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpsyg-13-847590-g003.tif"/>
</fig>
<table-wrap position="float" id="T3">
<label>TABLE 3</label>
<caption><p>Virtual reality mean (SD) completion time by age group with differences and pair-wise comparisons.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Cognitive domain</td>
<td valign="top" align="center" colspan="4">Age group Mean (<italic>SD</italic>) time taken (seconds)<hr/></td>
<td valign="top" align="center"><italic>P</italic>-values: <italic>F</italic>-test (Linear trend)</td>
<td valign="top" align="center" colspan="6">Pair-wise differences between age groups 95% confidence interval on difference and <italic>p</italic>-value<hr/></td>
</tr>
<tr>
<td/>
<td valign="top" align="center">(1) 35&#x2013;44 years (<italic>n</italic> = 24)</td>
<td valign="top" align="center">(2) 45&#x2013;54 years (<italic>n</italic> = 25)</td>
<td valign="top" align="center">(3) 55&#x2013;64 years (<italic>n</italic> = 25)</td>
<td valign="top" align="center">(4) 65&#x2013;74 years (<italic>n</italic> = 25)</td>
<td valign="top" align="left"/><td valign="top" align="center">(1) vs. (2)</td>
<td valign="top" align="center">(1) vs. (3)</td>
<td valign="top" align="center">(1) vs. (4)</td>
<td valign="top" align="center">(2) vs. (3)</td>
<td valign="top" align="center">(2) vs. (4)</td>
<td valign="top" align="center">(3) vs. (4)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left"><italic>Perceptual motor</italic></td>
<td valign="top" align="center">122.5 (37.0)</td>
<td valign="top" align="center">125.4 (32.5)</td>
<td valign="top" align="center">148.6 (36.1)</td>
<td valign="top" align="center">159.0 (38.8)</td>
<td valign="top" align="center">0.0009 (&#x003C;0.0001)</td>
<td valign="top" align="center">&#x2013;2.82 (&#x2013;23.3, 17.7) 0.786</td>
<td valign="top" align="center">&#x2013;26.1 (&#x2013;46.6, &#x2013;5.58) 0.013<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;36.5 (&#x2013;57.0, &#x2013;15.9) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;23.3 (&#x2013;43.6, &#x2013;2.97) 0.025<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;33.6 (&#x2013;53.9, &#x2013;13.3) 0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;10.4 (&#x2013;30.7, 9.95) 0.314</td>
</tr>
<tr>
<td valign="top" align="left"><italic>Executive function</italic></td>
<td valign="top" align="center">143.9 (36.9)</td>
<td valign="top" align="center">141.2 (34.9)</td>
<td valign="top" align="center">170.7 (37.2)</td>
<td valign="top" align="center">187.5 (47.0)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">2.64 (&#x2013;19.7, 24.9) 0.815</td>
<td valign="top" align="center">&#x2013;26.8 (&#x2013;49.1, &#x2013;4.51) 0.019<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;43.6 (&#x2013;65.9, &#x2013;21.4) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;29.4 (&#x2013;51.5, &#x2013;7.38) 0.010&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;46.3 (&#x2013;68.3, &#x2013;24.2) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;16.8 (&#x2013;38.9, 5.22) 0.133</td>
</tr>
<tr>
<td valign="top" align="left"><italic>Complex attention</italic></td>
<td valign="top" align="center">94.3 (20.6)</td>
<td valign="top" align="center">91.6 (23.7)</td>
<td valign="top" align="center">109.2 (18.8)</td>
<td valign="top" align="center">119.1 (27.0)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">2.61 (&#x2013;10.3, 15.5) 0.689</td>
<td valign="top" align="center">&#x2013;15.0 (&#x2013;27.9, &#x2013;2.07) 0.024<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;24.8 (&#x2013;37.8, &#x2013;11.9) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;17.6 (&#x2013;30.4, &#x2013;4.81) 0.008&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;27.4 (&#x2013;40.2, &#x2013;14.7) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;9.84 (&#x2013;22.6, 2.95) 0.130</td>
</tr>
<tr>
<td valign="top" align="left"><italic>Social cognition</italic></td>
<td valign="top" align="center">69.8 (20.3)</td>
<td valign="top" align="center">65.0 (13.5)</td>
<td valign="top" align="center">75.9 (16.1)</td>
<td valign="top" align="center">84.3 (24.1)</td>
<td valign="top" align="center">0.0036 (0.0019)</td>
<td valign="top" align="center">4.75 (&#x2013;5.98, 15.5) 0.382</td>
<td valign="top" align="center">&#x2013;6.17 (&#x2013;16.9, 4.56) 0.257</td>
<td valign="top" align="center">&#x2013;14.5 (&#x2013;25.3, &#x2013;3.80) 0.009&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;10.9 (&#x2013;21.5, &#x2013;0.30) 0.044<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;19.3 (&#x2013;29.9, &#x2013;8.66) &#x003C; 0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;8.36 (&#x2013;19.0, 2.26) 0.1215</td>
</tr>
<tr>
<td valign="top" align="left"><italic>Learning and memory</italic></td>
<td valign="top" align="center">60.0 (14.92)</td>
<td valign="top" align="center">62.2 (13.78)</td>
<td valign="top" align="center">68.8 (51.00)</td>
<td valign="top" align="center">75.0 (45.0)</td>
<td valign="top" align="center">0.0017 (0.0002)</td>
<td valign="top" align="center">&#x2013;2.12 (&#x2013;10.3, 6.07) 0.609</td>
<td valign="top" align="center">&#x2013;8.76 (&#x2013;16.9, &#x2013;0.57) 0.036<xref ref-type="table-fn" rid="t3fns1">&#x002A;</xref></td>
<td valign="top" align="center">&#x2013;14.9 (&#x2013;23.1, &#x2013;6.73) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;6.64 (&#x2013;14.7, 1.47) 0.107</td>
<td valign="top" align="center">&#x2013;12.8 (&#x2013;20.9, &#x2013;4.69) 0.002&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;6.16 (&#x2013;14.3, 1.95) 0.135</td>
</tr>
<tr>
<td valign="top" align="left"><italic>Language</italic></td>
<td valign="top" align="center">141.8 (20.0)</td>
<td valign="top" align="center">147.8 (20.0)</td>
<td valign="top" align="center">166.0 (28.6)</td>
<td valign="top" align="center">175.0 (29.2)</td>
<td valign="top" align="center">&#x003C;0.0001 (&#x003C;0.0001)</td>
<td valign="top" align="center">&#x2013;5.97 (&#x2013;20.6, 8.62) 0.419</td>
<td valign="top" align="center">&#x2013;24.2 (&#x2013;38.8, &#x2013;9.62) 0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;33.2 (&#x2013;47.8, &#x2013;18.6) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;18.2 (&#x2013;32.7, &#x2013;3.80) 0.014&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;27.2 (&#x2013;41.6, &#x2013;12.8) &#x003C;0.001&#x002A;&#x002A;&#x002A;</td>
<td valign="top" align="center">&#x2013;8.96 (&#x2013;23.4, 5.48) 0.221</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t3fns1"><p><italic>&#x002A;, &#x002A;&#x002A;, &#x002A;&#x002A;&#x002A; Statistically significant at p &#x003C; 0.05, p &#x003C; 0.01, p &#x003C; 0.001.</italic></p></fn>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="T3">Table 3</xref> summarizes the mean (SD) completion time for each age group and the inter-age group mean score differences. The completion time is discriminant in all six cognitive domains between the younger participants in Groups (1) and (2) compared to older participants in Group (4) respectively. Significant differences in completion time are noted between Groups (1) and (3) except for &#x201C;Social Cognition&#x201D; and between Groups (2) and (3) except for &#x201C;Learning and Memory&#x201D;. Completion time is similar between the younger participants in Groups (1) and (2) across all cognitive domains (<xref ref-type="table" rid="T3">Table 3</xref>).</p>
<p>In this study, all statistical analysis methods were parametric and assumed normal error distributions. The assumption of normality of the ANOVA residuals was evaluated, and subsequently found to be tenable. All comparisons were done using ANOVA <italic>F</italic>-tests, followed by <italic>t</italic>-tests for pair-wise comparisons among the four age groups.</p>
</sec>
<sec id="S5" sec-type="discussion">
<title>Discussion</title>
<p>Virtual reality is increasingly applied in cognitive assessment for neurocognitive disorders, conferring merits and advantages over the paper-and-pencil methods (<xref ref-type="bibr" rid="B11">Jin et al., 2020</xref>). The results show differentiation of the CAVIRE-based VR performance indices between the younger and older cognitively healthy adults across six cognitive domains. This study distinguishes itself from most VR studies that assessed cognitive function on older adults. <xref ref-type="bibr" rid="B5">Bottiroli et al. (2017)</xref> validated a VR platform using serious games, known as Smart Aging, for assessing cognitive functions in normal aging. Their participants were largely Italians aged 50 years and older, whose memory, executive functions, working memory, visual spatial elaboration, language, and orientation were assessed. In contrast, the current study focused on the VR performance of multi-ethnic Asian adults to complete activities of daily living in an urban environment, which could better reflect the impact of cognition on their life.</p>
<p>The findings in this study show that cognitively healthy younger Asian adults tended to attain higher performance scores and completed the VR within shorter time, compared to their older counterparts. The differentiation cuts across all six domains between the age groups of 35&#x2013;44 and 65&#x2013;74. Kinesthesia, the conscious awareness of limb position or limb motion, can influence the participant&#x2019;s VR performance. <xref ref-type="bibr" rid="B10">Fry et al. (2003)</xref> reported age-related changes in upper extremity kinesthesia. According to their study, the younger participants demonstrated significantly lower thresholds of kinesthetic acuity and less error on the kinesthetic memory test compared with those who were older. Thus, the time-based VR performance provides another dimension to cognitive assessments, especially those involving executive function based on the brain-upper limbs axis.</p>
<p>Preparing a toast exemplifies such assessment of executive function, which is incorporated in segment 2 of the CAVIRE system. Such perspectives cannot be objectively evaluated via self-reporting in questionnaire-based assessments. The effect of kinesthesia in executive function alludes to the need to establish age-normalized performance indices specific to both the VR software and to the target population.</p>
<p>For executive function, significant declines in performance scores were noted from the 35&#x2013;44 years to the 55&#x2013;64 years age groups (<xref ref-type="table" rid="T2">Table 2</xref> and <xref ref-type="fig" rid="F2">Figure 2</xref>). A significant increase in the length of time to complete four tasks in this domain was noted between the 45&#x2013;54 and 55&#x2013;64 years age groups (<xref ref-type="table" rid="T3">Table 3</xref> and <xref ref-type="fig" rid="F3">Figure 3</xref>). Performing tasks such as preparing a toast, bringing along an umbrella in the context of a rainy day, crossing a road with signal from traffic lights and settling payment by selecting the correct currency notes are reflective of day-to-day executive functions. Such activities of daily living are difficult to assess without direct and objective observations in a clinical setting. Executing such simulated tasks requires reasoning and problem solving. This finding is compatible to the decline in executive function performance using VR from the fifth decade onward, as reported by <xref ref-type="bibr" rid="B20">Sakai et al. (2018)</xref>. The results of this study suggest that while the performance score decreases markedly in participants aged around mid-50s, the VR performance among the older age groups seems to be stable from 55 to 74 years of age (<xref ref-type="table" rid="T2">Table 2</xref>).</p>
<p>The VR performance scores for social cognition decreased significantly between the younger age groups and between the older age groups (<xref ref-type="table" rid="T2">Table 2</xref>). However, the VR completion time shows a distinct increase with age, shorter for the younger (up to 54 years) and longer for the older age groups (55 years and older). The selection of clothing, stall and emotions in the appropriate context are the main tasks in this cognitive domain. Broadly, social cognition includes processes such as social interactions, which are deemed appropriate in the local social context. It represents a complex integration of cognitive functions which are organized to deal with complex processing demands. The conventional paper-and-pencil cognitive assessments have limitations in evaluating social cognition. The study results suggest the potential of VR to screen for deficits in social cognition based on the performance score and completion time in people of different age groups.</p>
<p>Complex attention refers to a person&#x2019;s ability to maintain information in their mind for a short time and to manipulate that information. As the demands on attention increase, performance slows down and information is less able to be retained. Segment 4 is one of the four settings to provide insight on complex attention: a participant watches a television program while listening to the weather forecast on impending rain over the radio. The results show significant decline in performance score between the 35&#x2013;44 years and 45&#x2013;54 years age groups, and longer completion time between the 45&#x2013;54 years and 55&#x2013;64 years age groups. Complex attention seems to be similar for the older participants aged 55 years and older.</p>
<p>Language is based on the understanding of the English instructions to perform the stipulated task. Naming the fruits is a good example to demonstrate this cognitive domain. Both the performance score and completion time show significant difference between the 45&#x2013;54 and 55&#x2013;64 years age groups. Incorporating voice recognition technology in the CAVIRE software provides objective evaluation of a participant&#x2019;s articulation of the fruits&#x2019; names. Such integration of voice recognition and VR technologies to assess cognition is novel and likely synergistic. The combined applications have to be further evaluated when culturally adapted editions of CAVIRE in other languages are developed to cater to the multi-lingual Asian adults.</p>
<p>Virtual reality completion time is shown to distinguish the perceptual motor domain between the 45&#x2013;54 and 55&#x2013;64 years age groups. Activating the lift button to get to the correct floor is one of the four segments to assess perceptual motor function. The completion time is a reflection of the processing speed to carry out the task. Younger participants seem to accomplish such activity more rapidly than their older counterparts. Even the completion time varies across the younger participants in the 35&#x2013;44 and 45&#x2013;54 years age groups, although the differences are not statistically significant (<xref ref-type="fig" rid="F3">Figure 3</xref> and <xref ref-type="table" rid="T3">Table 3</xref>). <xref ref-type="bibr" rid="B25">Zhang et al. (2001)</xref> had also revealed that the VR completion time varied in the control group of younger healthy participants in a trial to compare with the VR performance of their counterparts with brain injury. Nevertheless, no difference is noted in their performance scores across the age groups. This calls for a review in the matrix score board to improve its discrimination of perceptual motor function.</p>
<p>In summary, the VR performances of Asian adults varied with age in using the CAVIRE system, with variable discrimination among the different cognitive domains. Nonetheless, users of this system should be cognizant of the differences in the performance scores and completion time between the younger and older participants, with the most significant demarcation around the age of 55 years old. Further refinements to the VR segments and scoring matrix are needed to reduce the wide dispersion of the scores and completion time.</p>
</sec>
<sec id="S6">
<title>Strengths and Limitations</title>
<p>The study shows the unprecedented ability of VR technology (CAVIRE) to discriminate the six cognitive domains according to the performance score and completion time in adults aged between 35 and 74 years old with healthy cognition (based on the MoCA). The CAVIRE system also shows significant differences in VR performance across the age groups, regardless of the cognitive domains. The voice recognition incorporated in the VR system is novel and allows objective assessment of language. Older participants were receptive to using the headset and completed the VR tasks without experiencing any adverse effects. No participants dropped out from the study due to failure to use the VR equipment. The embedded automated scoring system standardizes the appraisal of the VR tasks and measures the associated completion time. The results of this study complement that of a previous publication on CAVIRE, where the entire completion time for CAVIRE was significantly shorter compared to that of MoCA (<xref ref-type="bibr" rid="B23">Wong et al., 2021</xref>). These results show that CAVIRE is feasible to be used for cognitive assessment in the six cognitive domains for individuals across different age groups.</p>
<p>Nevertheless, the study has its limitations. The limited number of participants in each age group restricts the generalizability of this study to the general population. Cognitively healthy participants were identified using only a single MoCA cut-off score of 26. According to a multi-ethnic meta-analysis, a MoCA cut-off of 26 may not be suitable (<xref ref-type="bibr" rid="B7">Carson et al., 2017</xref>). However, it is important to note that the meta-analysis was based on different ethnic populations, which also included non-Asians. This current study on Asian individuals is contextualized to Singapore, in which a MoCA cut-off score of 26 is widely used (<xref ref-type="bibr" rid="B14">Ng et al., 2013</xref>).</p>
<p>Although no significant difference was observed in the basic demographic characteristics of the participants across the age groups, including education level, it was noted that a higher proportion of the younger participants (35&#x2013;44 years old) received post-secondary/tertiary education. Nevertheless, it is possible that the effect of education was not accentuated, since the tasks in the CAVIRE assessment consist of common activities of daily living. Other studies on VR have also shown that the effect of education is not significant (<xref ref-type="bibr" rid="B15">Oliveira et al., 2018</xref>). In the current study, participants were stratified according to different age groups, but due to the limited sample size, stratification according to education level was not possible. Potential confounders such as intellectual capacity, occupations and intra-age group variations were also not addressed in this study.</p>
<p>The performance scores for each cognitive domain were aggregated based on four different segments to mitigate execution variations and functional assessment in a single segment. The segments will require review and refinement to better differentiate the cognitive functions. An adequately powered randomized controlled trial using an enhanced edition of the CAVIRE system will be developed to attest to its discriminative ability in cognitive assessment across the different age groups, compared to conventional paper-and-pencil-based assessments.</p>
</sec>
<sec id="S7" sec-type="conclusion">
<title>Conclusion</title>
<p>The study shows that the VR performance scores and completion time of using the fully immersive and automated CAVIRE system significantly differ between the younger and older Asian adults with healthy cognition. The system has shown potential to assess adults across the six cognitive domains although its degree of discrimination varies between domains. Enhancements to the VR segments and matrix score are needed to establish the age group-specific normal performance indices.</p>
</sec>
<sec id="S8" sec-type="data-availability">
<title>Data Availability Statement</title>
<p>The original contributions presented in the study are included in the article; further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="S9">
<title>Ethics Statement</title>
<p>The studies involving human participants were reviewed and approved by SingHealth Centralised Institutional Review Board, Singapore. The patients/participants provided their written informed consent to participate in this study.</p>
</sec>
<sec id="S10">
<title>Author Contributions</title>
<p>JL, WW, JQ, and NT designed the VR performance tasks and score matrix and the study protocol. RM reviewed the face validity of the 13 segments for cognitive assessment. TT and SL collaborated with FXMedia Internet Pte Ltd, the industrial collaborator, to develop the VR system, including the voice recognition component. WW and NT secured the funding. PM and JL conducted the study and collated and audited the data. JA analyzed the data. NT, JA, JL, RM, WW, and JQ interpreted the results. JL and NT drafted the manuscript. All the authors reviewed, critiqued, and revised the draft, finalized and approved before submitting the manuscript to the journal.</p>
</sec>
<sec id="conf1" sec-type="COI-statement">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="pudiscl1" sec-type="disclaimer">
<title>Publisher&#x2019;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
</body>
<back>
<sec id="S11" sec-type="funding-information">
<title>Funding</title>
<p>This study was funded by the Mitsui Sumitomo Insurance Welfare Foundation Research Grant 2019, the SingHealth Polyclinics Seed Funding (SHP-SEED53-2019[4]) and the AM-ETHOS Duke-NUS Medical Student Fellowship Award (AM-ETHOS01/FY2019/34-A34). The software development was funded by the Infocomm Media Development Authority (IMDA) of Singapore. The funds were used to procure the VR equipment, consumables and reimburse the research participants.</p>
</sec>
<ack>
<p>The authors would like to acknowledge IMDA, the efforts of the VR software developers and graphic designers from FXMedia Internet Pte Ltd for their help in developing the CAVIRE system. The authors are also grateful to the administrative team in the Research Department in SHP who assisted in the ethics review board application and the procurement of the equipment.</p>
</ack>
<app-group>
<app id="A1">
<title>Appendix</title>
<table-wrap position="float" id="T4">
<label>TABLE A1</label>
<caption><p>Scoring algorithm of the VR assessment.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Segment</td>
<td valign="top" align="left">Task</td>
<td valign="top" align="center" colspan="5">Score<hr/></td>
<td valign="top" align="left">Remarks</td>
<td valign="top" align="left">Cognitive Domain(s) assessed</td>
<td valign="top" align="center">Given time (sec)</td>
</tr>
<tr>
<td/>
<td valign="top" align="left"/><td valign="top" align="left">0</td>
<td valign="top" align="left">25</td>
<td valign="top" align="left">50</td>
<td valign="top" align="left">75</td>
<td valign="top" align="left">100</td>
<td valign="top" align="left"/><td valign="top" align="left"/><td valign="top" align="left"/></tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">1</td>
<td valign="top" align="left">Following step-by-step instructions:<break/> (1) Squeeze toothpaste on toothbrush<break/> (2) Brush teeth<break/> (3) Rinse mouth</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete 1 Task</td>
<td valign="top" align="left">Complete 2 Tasks</td>
<td valign="top" align="left">Complete all 3 Tasks</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Perceptual motor Language</td>
<td valign="top" align="center">90</td>
</tr>
<tr>
<td valign="top" align="left">2</td>
<td valign="top" align="left">Preparing peanut butter bread without specific instructions:<break/> (1) Open peanut butter jar<break/> (2) Take peanut butter using knife<break/> (3) Spread peanut butter on bread</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete 1 Task</td>
<td valign="top" align="left">Complete 2 Tasks</td>
<td valign="top" align="left">Complete all 3 Tasks</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Perceptual-motor Executive function</td>
<td valign="top" align="center">90</td>
</tr>
<tr>
<td valign="top" align="left">3</td>
<td valign="top" align="left">Identify 3 images of important persons in the newspaper:<break/> (1) Lee Kuan Yew (2) Halimah Yacob (3) Goh Chok Tong</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to identify any images correctly</td>
<td valign="top" align="left">Identify 1 image correctly</td>
<td valign="top" align="left">Identify 2 images correctly</td>
<td valign="top" align="left">Identify all 3 images correctly</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Learning and memory Language</td>
<td valign="top" align="center">30</td>
</tr>
<tr>
<td valign="top" align="left">4</td>
<td valign="top" align="left">(1) Remember to take umbrella before leaving the house later<break/> &#x003E;&#x003E;&#x003E; Television acts as a distractor<break/> &#x003E;&#x003E;&#x003E; Radio gives weather forecast of rain</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">This Task will be scored in Segment 7(b)</td>
<td valign="top" align="left">Complex attention Learning and memory</td>
<td valign="top" align="center">NA</td>
</tr>
<tr>
<td valign="top" align="left">5</td>
<td valign="top" align="left">(1) Name the five fruits by reading out aloud: Apple, Banana, Watermelon, Mango, Durian<break/> (2) Remember the five fruits</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to name any fruits correctly</td>
<td valign="top" align="left">Name 1&#x2013;2 fruits correctly</td>
<td valign="top" align="left">Name 3&#x2013;4 fruits correctly</td>
<td valign="top" align="left">Name 5 fruits correctly</td>
<td valign="top" align="left">Task #2 will be scored in Segment 11</td>
<td valign="top" align="left">Learning and memory Language</td>
<td valign="top" align="center">50</td>
</tr>
<tr>
<td valign="top" align="left">6</td>
<td valign="top" align="left">(1) Choose appropriate clothing (female/male) to go out for shopping</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to choose the correct clothing</td>
<td valign="top" align="left">Choose the correct clothing in 2 attempts or more</td>
<td valign="top" align="left">Choose the correct clothing in 1 attempt, with a time of 15 s or more</td>
<td valign="top" align="left">Choose the correct clothing in 1 attempt, with a time of less than 15 s</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Social cognition</td>
<td valign="top" align="center">30</td>
</tr>
<tr>
<td valign="top" align="left">7(a)</td>
<td valign="top" align="left">(1) Open the door<break/> (2) Select the correct item to lock the door<break/> (3) Lock the door</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete 1 Task</td>
<td valign="top" align="left">Complete 2 Tasks</td>
<td valign="top" align="left">Complete all 3 Tasks</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Perceptual-motor Executive function</td>
<td valign="top" align="center">60</td>
</tr>
<tr>
<td valign="top" align="left">7(b)</td>
<td valign="top" align="left">Remember to take umbrella before leaving the house<break/> &#x003E;&#x003E;&#x003E; Hint given at two time points:<break/> (i) before locking the door<break/> (ii) after locking the door</td>
<td valign="top" align="left">Does not remember to take the umbrella at all</td>
<td valign="top" align="left">Remember to take the umbrella after closing the door outside the house (hint is given for the second time)</td>
<td valign="top" align="left">Remember to take the umbrella before closing the door outside the house (hint is given for the first time)</td>
<td valign="top" align="left">Remember to take the umbrella after opening the door inside the house (before any hints are given)</td>
<td valign="top" align="left">Remember to take the umbrella before opening the door inside the house (before any hints are given)</td>
<td valign="top" align="left">Continued from Segment 4</td>
<td valign="top" align="left">Continued from Segment 4</td>
<td valign="top" align="center">NA</td>
</tr>
<tr>
<td valign="top" align="left">8</td>
<td valign="top" align="left">(1) Press button to go down (outside lift)<break/> (2) Press button for Level 1 (inside lift)</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete both Tasks in 2 attempts or more respectively</td>
<td valign="top" align="left">Complete 1 Task in 1 attempt and Complete the other Task in 2 attempts or more</td>
<td valign="top" align="left">Complete Task #1 in 1 attempt and Complete Task #2 in 1 attempt</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Perceptual-motor</td>
<td valign="top" align="center">20</td>
</tr>
<tr>
<td valign="top" align="left">9</td>
<td valign="top" align="left">(1) Press &#x201C;Start&#x201D; to cross after traffic light turns green<break/> (2) Looks to the left before crossing<break/> (3) Looks to the right before crossing</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete 1 Task</td>
<td valign="top" align="left">Complete 2 Tasks</td>
<td valign="top" align="left">Complete all 3 Tasks</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Executive function Complex attention Social cognition</td>
<td valign="top" align="center">60</td>
</tr>
<tr>
<td valign="top" align="left">10</td>
<td valign="top" align="left">Choose the correct store:<break/> i.e., clothes, electrical appliances, vegetables, fruits</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to choose the correct store</td>
<td valign="top" align="left">Choose the correct store in 2 attempts or more</td>
<td valign="top" align="left">Choose the correct store in 1 attempt, with a total time of 15 s or more</td>
<td valign="top" align="left">Choose the correct store in 1 attempt, with a total time of less than 15 s</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Complex attention Social cognition</td>
<td valign="top" align="center">40</td>
</tr>
<tr>
<td valign="top" align="left">11</td>
<td valign="top" align="left">Choose the five fruits based on the previous shopping list:<break/> i.e., Apple, Banana, Watermelon, Mango, Durian</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to choose any fruits correctly</td>
<td valign="top" align="left">Choose three fruits or less correctly</td>
<td valign="top" align="left">Choose four fruits correctly</td>
<td valign="top" align="left">Choose five fruits correctly</td>
<td valign="top" align="left">Continued from Segment 5</td>
<td valign="top" align="left">Learning and memory</td>
<td valign="top" align="center">50</td>
</tr>
<tr>
<td valign="top" align="left">12</td>
<td valign="top" align="left">Following step-by-step instructions:<break/>(1) Calculate total price of the five fruits<break/> (2) Pay exact amount of money</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete both Tasks in 2 attempts or more respectively</td>
<td valign="top" align="left">Complete 1 Task in 1 attempt, and Complete the other Task in 2 attempts or more</td>
<td valign="top" align="left">Complete Task #1 in 1 attempt, and Complete Task #2 in 1 attempt</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Executive function Complex attention Language</td>
<td valign="top" align="center">90</td>
</tr>
<tr>
<td valign="top" align="left">13</td>
<td valign="top" align="left">Choose the correct emotion with regards to the scene:<break/> (1) Birthday party<break/> (2) Car accident</td>
<td valign="top" align="left">No attempt</td>
<td valign="top" align="left">Attempts, but unable to complete any Tasks</td>
<td valign="top" align="left">Complete both Tasks in 2 attempts or more respectively</td>
<td valign="top" align="left">Complete 1 Task in 1 attempt, and Complete the other Task in 2 attempts or more</td>
<td valign="top" align="left">Complete Task #1 in 1 attempt, and Complete Task #2 in 1 attempt</td>
<td valign="top" align="left">NIL</td>
<td valign="top" align="left">Social cognition</td>
<td valign="top" align="center">30</td>
</tr>
</tbody>
</table>
</table-wrap>
</app>
</app-group>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abd Razak</surname> <given-names>M. A.</given-names></name> <name><surname>Ahmad</surname> <given-names>N. A.</given-names></name> <name><surname>Chan</surname> <given-names>Y. Y.</given-names></name> <name><surname>Mohamad Kasim</surname> <given-names>N.</given-names></name> <name><surname>Yusof</surname> <given-names>M.</given-names></name> <name><surname>Abdul Ghani</surname> <given-names>M. K. A.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>Validity of screening tools for dementia and mild cognitive impairment among the elderly in primary health care: a systematic review.</article-title> <source><italic>Public Health</italic></source> <volume>169</volume> <fpage>84</fpage>&#x2013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1016/j.puhe.2019.01.001</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Acevedo</surname> <given-names>A.</given-names></name> <name><surname>Loewenstein</surname> <given-names>D. A.</given-names></name> <name><surname>Agr&#x00F3;n</surname> <given-names>J.</given-names></name> <name><surname>Duara</surname> <given-names>R.</given-names></name></person-group> (<year>2007</year>). <article-title>Influence of sociodemographic variables on neuropsychological test performance in Spanish-speaking older adults.</article-title> <source><italic>J. Clin. Exper. Neuropsychol.</italic></source> <volume>29</volume> <fpage>530</fpage>&#x2013;<lpage>544</lpage>. <pub-id pub-id-type="doi">10.1080/13803390600814740</pub-id> <pub-id pub-id-type="pmid">17564918</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><collab>American Psychiatric Association.</collab> (<year>2013</year>). <source><italic>Diagnostic and Statistical Manual of Mental Disorders, 5th Edn (DSM-5).</italic></source> <publisher-loc>Arlington, VA</publisher-loc>: <publisher-name>American Psychiatric Association</publisher-name>, <pub-id pub-id-type="doi">10.1176/appi.books.9780890425596</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Billingham</surname> <given-names>S. A.</given-names></name> <name><surname>Whitehead</surname> <given-names>A. L.</given-names></name> <name><surname>Julious</surname> <given-names>S. A.</given-names></name></person-group> (<year>2013</year>). <article-title>An audit of sample sizes for pilot and feasibility trials being undertaken in the United Kingdom registered in the United Kingdom Clinical Research Network database.</article-title> <source><italic>BMC Med. Res. Methodol.</italic></source> <volume>13</volume>:<fpage>104</fpage>. <pub-id pub-id-type="doi">10.1186/1471-2288-13-104</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bottiroli</surname> <given-names>S.</given-names></name> <name><surname>Tassorelli</surname> <given-names>C.</given-names></name> <name><surname>Lamonica</surname> <given-names>M.</given-names></name> <name><surname>Zucchella</surname> <given-names>C.</given-names></name> <name><surname>Cavallini</surname> <given-names>E.</given-names></name> <name><surname>Bernini</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>Smart Aging Platform for Evaluating Cognitive Functions in Aging: A Comparison with the MoCA in a Normal Population.</article-title> <source><italic>Front. Aging Neurosci.</italic></source> <volume>9</volume>:<fpage>379</fpage>. <pub-id pub-id-type="doi">10.3389/fnagi.2017.00379</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brasure</surname> <given-names>M.</given-names></name> <name><surname>Desai</surname> <given-names>P.</given-names></name> <name><surname>Davila</surname> <given-names>H.</given-names></name> <name><surname>Nelson</surname> <given-names>V. A.</given-names></name> <name><surname>Calvert</surname> <given-names>C.</given-names></name> <name><surname>Jutkowitz</surname> <given-names>E.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Physical Activity Interventions in Preventing Cognitive Decline and Alzheimer-Type Dementia: A Systematic Review.</article-title> <source><italic>Ann. Internal Med.</italic></source> <volume>168</volume> <fpage>30</fpage>&#x2013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.7326/M17-1528</pub-id> <pub-id pub-id-type="pmid">29255839</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carson</surname> <given-names>N.</given-names></name> <name><surname>Leach</surname> <given-names>L.</given-names></name> <name><surname>Murphy</surname> <given-names>K. J.</given-names></name></person-group> (<year>2017</year>). <article-title>A re-examination of Montreal Cognitive Assessment (MoCA) cutoff scores.</article-title> <source><italic>Int. J. Geriatric Psychiatry</italic></source> <volume>33</volume> <fpage>379</fpage>&#x2013;<lpage>388</lpage>. <pub-id pub-id-type="doi">10.1002/gps.4756</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Davison</surname> <given-names>S. M. C.</given-names></name> <name><surname>Deeprose</surname> <given-names>C.</given-names></name> <name><surname>Terbeck</surname> <given-names>S.</given-names></name></person-group> (<year>2017</year>). <article-title>A comparison of immersive virtual reality with traditional neuropsychological measures in the assessment of executive functions.</article-title> <source><italic>Acta Neuropsychiatrica</italic></source> <volume>30</volume> <fpage>79</fpage>&#x2013;<lpage>89</lpage>. <pub-id pub-id-type="doi">10.1017/neu.2017.14</pub-id> <pub-id pub-id-type="pmid">28482936</pub-id></citation></ref>
<ref id="B9"><citation citation-type="journal"><collab>Department of Statistics, Ministry of Trade and Industry, and Republic of Singapore.</collab> (<year>2019</year>). <source><italic>Population Trends.</italic></source> Available online at: <ext-link ext-link-type="uri" xlink:href="https://www.singstat.gov.sg/-/media/files/publications/population/population2019.pdf">https://www.singstat.gov.sg/-/media/files/publications/population/population2019.pdf</ext-link> <comment>(accessed on Feb 25, 2020)</comment></citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fry</surname> <given-names>D. K</given-names></name> <name><surname>Campbell</surname> <given-names>J</given-names></name> <name><surname>Foltz</surname> <given-names>B</given-names></name> <name><surname>Macek</surname> <given-names>R.</given-names></name></person-group> (<year>2003</year>). <article-title>Age-related changes in upper extremity kinesthesis.</article-title> <source><italic>Phys. Occupat. Ther. Geriatrics</italic></source> <volume>20</volume> <fpage>137</fpage>&#x2013;<lpage>154</lpage>. <pub-id pub-id-type="doi">10.1080/J148v20n03_09</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jin</surname> <given-names>R.</given-names></name> <name><surname>Pilozzi</surname> <given-names>A.</given-names></name> <name><surname>Huang</surname> <given-names>X.</given-names></name></person-group> (<year>2020</year>). <article-title>Current Cognition Tests, Potential Virtual Reality Applications, and Serious Games in Cognitive Assessment and Non-Pharmacological Therapy for Neurocognitive Disorders.</article-title> <source><italic>J. Clin. Med</italic></source> <volume>2020</volume>:<fpage>3287</fpage>. <pub-id pub-id-type="doi">10.3390/jcm9103287</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Larouche</surname> <given-names>E.</given-names></name> <name><surname>Tremblay</surname> <given-names>M. P.</given-names></name> <name><surname>Potvin</surname> <given-names>O.</given-names></name> <name><surname>Laforest</surname> <given-names>S.</given-names></name> <name><surname>Bergeron</surname> <given-names>D.</given-names></name> <name><surname>Laforce</surname> <given-names>R.</given-names></name><etal/></person-group> (<year>2016</year>). <article-title>Normative Data for the Montreal Cognitive Assessment in Middle-Aged and Elderly Quebec-French People.</article-title> <source><italic>Arch. Clin. Neuropsychol.</italic></source> <volume>31</volume> <fpage>819</fpage>&#x2013;<lpage>826</lpage>. <pub-id pub-id-type="doi">10.1093/arclin/acw076</pub-id> <pub-id pub-id-type="pmid">27625048</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lim</surname> <given-names>J. E.</given-names></name> <name><surname>Wong</surname> <given-names>W. T.</given-names></name> <name><surname>Teh</surname> <given-names>T. A.</given-names></name> <name><surname>Lim</surname> <given-names>S. H.</given-names></name> <name><surname>Allen</surname> <given-names>J. C.</given-names> <suffix>Jr.</suffix></name> <name><surname>Quah</surname> <given-names>J. H. M.</given-names></name><etal/></person-group> (<year>2021</year>). <article-title>A Fully-Immersive and Automated Virtual Reality System to Assess the Six Domains of Cognition: Protocol for a Feasibility Study.</article-title> <source><italic>Front. Aging Neurosci.</italic></source> <volume>12</volume>:<fpage>604670</fpage>. <pub-id pub-id-type="doi">10.3389/fnagi.2020.604670</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ng</surname> <given-names>A.</given-names></name> <name><surname>Chew</surname> <given-names>I.</given-names></name> <name><surname>Narasimhalu</surname> <given-names>K.</given-names></name> <name><surname>Kandiah</surname> <given-names>N.</given-names></name></person-group> (<year>2013</year>). <article-title>Effectiveness of Montreal Cognitive Assessment for the diagnosis of mild cognitive impairment and mild Alzheimer&#x2019;s disease in Singapore.</article-title> <source><italic>Singapore Med. J.</italic></source> <volume>54</volume> <fpage>616</fpage>&#x2013;<lpage>619</lpage>. <pub-id pub-id-type="doi">10.11622/smedj.2013220</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oliveira</surname> <given-names>C. R.</given-names></name> <name><surname>Lopes Filho</surname> <given-names>B.</given-names></name> <name><surname>Esteves</surname> <given-names>C. S.</given-names></name> <name><surname>Rossi</surname> <given-names>T.</given-names></name> <name><surname>Nunes</surname> <given-names>D. S.</given-names></name> <name><surname>Lima</surname> <given-names>M.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Neuropsychological Assessment of Older Adults With Virtual Reality: Association of Age, Schooling, and General Cognitive Status.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>9</volume>:<fpage>1085</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2018.01085</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ouellet</surname> <given-names>&#x00C9;</given-names></name> <name><surname>Boller</surname> <given-names>B.</given-names></name> <name><surname>Corriveau-Lecavalier</surname> <given-names>N.</given-names></name> <name><surname>Cloutier</surname> <given-names>S.</given-names></name> <name><surname>Belleville</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>The Virtual Shop: A new immersive virtual reality environment and scenario for the assessment of everyday memory.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>303</volume> <fpage>126</fpage>&#x2013;<lpage>135</lpage>. <pub-id pub-id-type="doi">10.1016/j.jneumeth.2018.03.010</pub-id> <pub-id pub-id-type="pmid">29581009</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Parsons</surname> <given-names>T. D.</given-names></name></person-group> (<year>2015</year>). <article-title>&#x201C;Virtual Reality for Enhanced Ecological Validity and Experimental Control in the Clinical, Affective and Social Neurosciences.&#x201D;.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>9</volume>:<fpage>660</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2015.00660</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Piccinin</surname> <given-names>A. M.</given-names></name> <name><surname>Muniz-Terrera</surname> <given-names>G.</given-names></name> <name><surname>Clouston</surname> <given-names>S.</given-names></name> <name><surname>Reynolds</surname> <given-names>C. A.</given-names></name> <name><surname>Thorvaldsson</surname> <given-names>V.</given-names></name> <name><surname>Deary</surname> <given-names>I. J.</given-names></name><etal/></person-group> (<year>2013</year>). <article-title>Coordinated analysis of age, sex, and education effects on change in MMSE scores.</article-title> <source><italic>J. Gerontol. Ser. B Psychol. Sci. Soc. Sci.</italic></source> <volume>68</volume> <fpage>374</fpage>&#x2013;<lpage>390</lpage>. <pub-id pub-id-type="doi">10.1093/geronb/gbs077</pub-id> <pub-id pub-id-type="pmid">23033357</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Plechat&#x00E1;</surname> <given-names>A.</given-names></name> <name><surname>Sahula</surname> <given-names>V.</given-names></name> <name><surname>Fayette</surname> <given-names>D.</given-names></name> <name><surname>Fajnerov&#x00E1;</surname> <given-names>I.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>Age-Related Differences With Immersive and Non-immersive Virtual Reality in Memory Assessment.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>10</volume>:<fpage>1330</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2019.01330</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sakai</surname> <given-names>H.</given-names></name> <name><surname>Nagano</surname> <given-names>A.</given-names></name> <name><surname>Seki</surname> <given-names>K.</given-names></name> <name><surname>Okahashi</surname> <given-names>S.</given-names></name> <name><surname>Kojima</surname> <given-names>M.</given-names></name> <name><surname>Luo</surname> <given-names>Z.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Development of a cognitive function test using virtual reality technology: examination in healthy participants.</article-title> <source><italic>Aging Neuropsychol. Cognition</italic></source> <volume>25</volume> <fpage>561</fpage>&#x2013;<lpage>575</lpage>. <pub-id pub-id-type="doi">10.1080/13825585.2017.1351916</pub-id> <pub-id pub-id-type="pmid">28701070</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Spooner</surname> <given-names>D. M.</given-names></name> <name><surname>Pachana</surname> <given-names>N. A.</given-names></name></person-group> (<year>2006</year>). <article-title>&#x201C;Ecological validity in neuropsychological assessment: a case for greater consideration in research with neurologically intact populations.&#x201D;</article-title> <source><italic>Arch. Clin. Neuropsychol.</italic></source> <volume>21</volume> <fpage>327</fpage>&#x2013;<lpage>337</lpage>. <pub-id pub-id-type="doi">10.1016/j.acn.2006.04.004</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vallejo</surname> <given-names>V.</given-names></name> <name><surname>Wyss</surname> <given-names>P.</given-names></name> <name><surname>Rampa</surname> <given-names>L.</given-names></name> <name><surname>Mitache</surname> <given-names>A. V.</given-names></name> <name><surname>M&#x00FC;ri</surname> <given-names>R. M.</given-names></name> <name><surname>Mosimann</surname> <given-names>U. P.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>Evaluation of a novel Serious Game based assessment tool for patients with Alzheimer&#x2019;s disease.</article-title> <source><italic>PLoS One</italic></source> <volume>12</volume>:<fpage>e0175999</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0175999</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wong</surname> <given-names>W. T.</given-names></name> <name><surname>Tan</surname> <given-names>N. C.</given-names></name> <name><surname>Lim</surname> <given-names>J. E.</given-names></name> <name><surname>Allen</surname> <given-names>J. C.</given-names> <suffix>Jr.</suffix></name> <name><surname>Lee</surname> <given-names>W. S.</given-names></name> <name><surname>Quah</surname> <given-names>J. H. M.</given-names></name><etal/></person-group> (<year>2021</year>). <article-title>Comparison of Time Taken to Assess Cognitive Function Using a Fully Immersive and Automated Virtual Reality System vs. the Montreal Cognitive Assessment.</article-title> <source><italic>Front. Aging Neurosci.</italic></source> <volume>13</volume>:<fpage>756891</fpage>. <pub-id pub-id-type="doi">10.3389/fnagi.2021.756891</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><collab>World Health Organisation.</collab> (<year>2019</year>). <source><italic>Dementia.</italic></source> Available online at: <ext-link ext-link-type="uri" xlink:href="https://www.who.int/news-room/fact-sheets/detail/dementia">https://www.who.int/news-room/fact-sheets/detail/dementia</ext-link> <comment>(accessed May 14, 2019)</comment></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>L.</given-names></name> <name><surname>Abreu</surname> <given-names>B. C.</given-names></name> <name><surname>Masel</surname> <given-names>B.</given-names></name> <name><surname>Scheibel</surname> <given-names>R. S.</given-names></name> <name><surname>Christiansen</surname> <given-names>C. H.</given-names></name> <name><surname>Huddleston</surname> <given-names>N.</given-names></name><etal/></person-group> (<year>2001</year>). <article-title>Virtual reality in the Assessment for selected Cognitive Functions after Brain Injury.</article-title> <source><italic>Am. J. Phys. Med. Rehabil.</italic></source> <volume>80</volume> <fpage>597</fpage>&#x2013;<lpage>604</lpage>.</citation></ref>
</ref-list>
</back>
</article>
