<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<?covid-19-tdm?>
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2022.993606</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Neural dynamics during emotional video engagement relate to anxiety</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Nan</surname> <given-names>Jason</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1918434/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Balasubramani</surname> <given-names>Pragathi P.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/98403/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Ramanathan</surname> <given-names>Dhakshin</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1038629/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Mishra</surname> <given-names>Jyoti</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c002"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1152718/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Neural Engineering and Translation Labs, Department of Psychiatry, University of California</institution>, <addr-line>San Diego, San Diego, CA</addr-line>, <country>United States</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of Bioengineering, University of California</institution>, <addr-line>San Diego, San Diego, CA</addr-line>, <country>United States</country></aff>
<aff id="aff3"><sup>3</sup><institution>Department of Cognitive Science, Indian Institute of Technology Kanpur</institution>, <addr-line>Kanpur</addr-line>, <country>India</country></aff>
<aff id="aff4"><sup>4</sup><institution>Department of Mental Health, VA San Diego Medical Center</institution>, <addr-line>San Diego, CA</addr-line>, <country>United States</country></aff>
<aff id="aff5"><sup>5</sup><institution>Center of Excellence for Stress and Mental Health, VA San Diego Medical Center</institution>, <addr-line>San Diego, CA</addr-line>, <country>United States</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Irina Strigo, University of California, San Francisco, United States</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Shuang Liu, Tianjin University, China; Blair Kaneshiro, Stanford University, United States</p></fn>
<corresp id="c001">&#x002A;Correspondence: Jason Nan, <email>janan@ucsd.edu</email></corresp>
<corresp id="c002">Jyoti Mishra, <email>jymishra@health.ucsd.edu</email></corresp>
<fn fn-type="other" id="fn004"><p>This article was submitted to Brain Imaging and Stimulation, a section of the journal Frontiers in Human Neuroscience</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>11</day>
<month>11</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>16</volume>
<elocation-id>993606</elocation-id>
<history>
<date date-type="received">
<day>13</day>
<month>07</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>27</day>
<month>10</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2022 Nan, Balasubramani, Ramanathan and Mishra.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Nan, Balasubramani, Ramanathan and Mishra</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>Inter-subject correlations (ISCs) of physiological data can reveal common stimulus-driven processing across subjects. ISC has been applied to passive video viewing in small samples to measure common engagement and emotional processing. Here, in a large sample study of healthy adults (<italic>N</italic> = 163) who watched an emotional film (The Lion Cage by Charlie Chaplin), we recorded electroencephalography (EEG) across participants and measured ISC in theta, alpha and beta frequency bands. Peak ISC on the emotionally engaging video was observed three-quarters into the film clip, during a time period which potentially elicited a positive emotion of relief. Peak ISC in all frequency bands was focused over centro-parietal electrodes localizing to superior parietal cortex. ISC in both alpha and beta frequencies had a significant inverse relationship with anxiety symptoms. Our study suggests that ISC measured during continuous non-event-locked passive viewing may serve as a useful marker for anxious mood.</p>
</abstract>
<kwd-group>
<kwd>EEG</kwd>
<kwd>inter-subject correlation</kwd>
<kwd>anxiety</kwd>
<kwd>mood</kwd>
<kwd>emotion</kwd>
<kwd>parietal</kwd>
</kwd-group>
<counts>
<fig-count count="4"/>
<table-count count="2"/>
<equation-count count="0"/>
<ref-count count="63"/>
<page-count count="11"/>
<word-count count="7447"/>
</counts>
</article-meta>
</front>
<body>
<sec id="S1" sec-type="intro">
<title>Introduction</title>
<p>Humans, as social creatures, tend to mirror the emotions of others they see, whether in person or digitally. Witnessing a person walk across a tightrope might induce most viewers to feel anxious and stressed, while a cute video of a cat might be heartwarming (<xref ref-type="bibr" rid="B59">Uhrig et al., 2016</xref>). Recent studies have quantified the neural correlates of mirroring or &#x201C;sync up&#x201D; with others by calculating inter-subject correlation (ISC) of neuroimaging and electrophysiological data analyzed on naturalistic stimuli such as films and movie clips. In essence, ISC is a useful neural marker in answering the question &#x201C;How alike is one subject to the rest of the group?&#x201D;. The first major study in this field measured blood-oxygen-level-dependent (BOLD) activity as subjects viewed sections of a movie. High levels of correlated brain activity between subjects were observed in areas beyond basic sensory processing regions, including the limbic system and superior temporal cortex (<xref ref-type="bibr" rid="B28">Hasson et al., 2004</xref>). This suggested that there are common, temporally similar patterns of activity in higher order associative processing regions. Follow up studies (<xref ref-type="bibr" rid="B30">J&#x00E4;&#x00E4;skel&#x00E4;inen et al., 2008</xref>; <xref ref-type="bibr" rid="B42">Nummenmaa et al., 2012</xref>; <xref ref-type="bibr" rid="B14">Di and Biswal, 2020</xref>), using functional magnetic resonance imaging (fMRI) or functional near infrared spectroscopy (fNIRS), have corroborated Hasson&#x2019;s earlier findings, showing ISC in a number of brain regions beyond basic sensory cortex. 
Additional studies have shown this effect is likely related to emotional processing or engagement and not strictly external stimulus dependent, as ISC can be elicited with narrative content in either written or audio form (<xref ref-type="bibr" rid="B53">Regev et al., 2013</xref>; <xref ref-type="bibr" rid="B61">Wang and He, 2014</xref>).</p>
<p>The BOLD signal measured in most of the studies noted above is a proxy for actual neural activity (frequency band power) and is expensive to measure. Electroencephalography (EEG), which is a low-cost method that directly reflects physiological activity of neural populations, has also been used to measure ISC (<xref ref-type="bibr" rid="B17">Dmochowski et al., 2012</xref>, <xref ref-type="bibr" rid="B16">2014</xref>). Such studies have corroborated those noted above, demonstrating that ISC in EEG signals is greater during periods of high engagement and arousal (<xref ref-type="bibr" rid="B17">Dmochowski et al., 2012</xref>; <xref ref-type="bibr" rid="B6">Cohen et al., 2017</xref>, <xref ref-type="bibr" rid="B7">2018</xref>). For example, subjects hearing the same song in a repetitive manner showed lower ISC compared to a remix of the same song that had less predictable attributes, which would cause subjects to be more attentive (<xref ref-type="bibr" rid="B11">Dauer et al., 2021</xref>). Similarly, when subjects were presented an audiovisual stimulus, lower ISC was observed when they were distracted with mental math (<xref ref-type="bibr" rid="B33">Ki et al., 2016</xref>). One additional benefit of EEG is the high temporal resolution which allows frequency band filtering of specific brain activity. Theta band activity is known to increase in response to facial stimuli that occur during naturalistic viewing (<xref ref-type="bibr" rid="B5">Busch et al., 2009</xref>; <xref ref-type="bibr" rid="B19">Dravida et al., 2019</xref>). Beta oscillations have been associated with attentiveness (<xref ref-type="bibr" rid="B49">Posada-Quintero et al., 2019</xref>), while in other contexts, alpha and beta oscillations have been related to anxiety and stress (<xref ref-type="bibr" rid="B35">Knyazev et al., 2004</xref>; <xref ref-type="bibr" rid="B51">Price and Budzynski, 2009</xref>; <xref ref-type="bibr" rid="B1">Abhang et al., 2016</xref>). 
In fact, a study has already noted that ISC during emotional states does show distinct differences across frequency bands, though low frequency (delta) and high frequency gamma bands were most prominent (<xref ref-type="bibr" rid="B40">Maffei, 2020</xref>). As such, band-pass filtering to specific frequency bands is necessary to capture this oscillatory power (<xref ref-type="bibr" rid="B33">Ki et al., 2016</xref>). Altogether EEG-ISC analyses can offer complementary information to that observed from fMRI. The previous studies mentioned all used dense electrode systems, either 64 or 128 channels. Although using higher density EEG systems will provide more spatial resolution, a low-density system (14 channel EEG) was also shown sufficient to capture and measure ISC (<xref ref-type="bibr" rid="B50">Poulsen et al., 2017</xref>).</p>
<p>Thus, we can use ISC not only to assess how alike subjects are to each other, but also their engagement to the emotional stimuli being presented to them (<xref ref-type="bibr" rid="B50">Poulsen et al., 2017</xref>). Several studies have also begun to explore whether ISC differs in individuals with anxiety and depression. For example, ISC is lower in individuals with social anxiety (<xref ref-type="bibr" rid="B41">Morrison et al., 2016</xref>), and major depressive disorders (<xref ref-type="bibr" rid="B27">Guo et al., 2015</xref>). Electrodermal activity (EDA) measures showed decreased ISC values in dysphoric individuals (<xref ref-type="bibr" rid="B39">Li et al., 2021</xref>). Thus, prior work has generally shown that natural videos can be used as a way of identifying &#x201C;common&#x201D; patterns of activity that occur across subjects, and individuals with various levels of anxiety and depression may show greater differences from this common pattern of visual-evoked activity, resulting in lower than expected ISC values.</p>
<p>In this study, we wanted to contribute to the above literature in two ways. First, it is increasingly recognized that a larger sample size is required for robust estimates of neurocognitive phenomena (<xref ref-type="bibr" rid="B58">Szucs and Ioannidis, 2020</xref>; <xref ref-type="bibr" rid="B21">Feng et al., 2022</xref>). As such, we wanted to estimate the degree to which ISC occurs using a large sample of subjects. Second, we were interested in observing, in this large sample, whether ISC might be inversely related to either anxiety or depressive symptoms. To accomplish this, we performed an EEG-based ISC analysis with data parsed into physiologically relevant theta (4&#x2013;8 Hz), alpha (8&#x2013;12 Hz), and beta (15&#x2013;30 Hz) frequency bands as subjects watched an emotionally evocative short movie clip. Additionally, we analyzed the correlation between each subject&#x2019;s average ISC and mental health symptoms of anxiety and depression. We also use sparse Bayesian source localization to identify brain regions that contribute to the EEG-based ISC.</p>
</sec>
<sec id="S2" sec-type="materials|methods">
<title>Materials and methods</title>
<sec id="S2.SS1">
<title>Participants</title>
<p>A total of 163 human subjects participated in the study (mean age: 39.80 &#x00B1; 22.65, range: 15&#x2013;84 years, 59 males). All subjects were fluent in English and provided written informed consent to participate in the study following the University of California San Diego (UCSD) institutional review board (IRB) protocol #180140. Participants were recruited by convenience sampling from the local university and broader San Diego community using flyers and the online Research Match registry. All data collection took place prior to the COVID-19 pandemic research restrictions placed in Spring 2020.</p>
<p>Participants provided demographics data with regards to age, gender, ethnicity, and provided mental health data on standard scales of generalized anxiety (7-item generalized anxiety disorder scale, GAD7) (<xref ref-type="bibr" rid="B56">Spitzer et al., 2006</xref>), and depression (9-item patient health questionnaire, PHQ9) (<xref ref-type="bibr" rid="B37">Kroenke et al., 2001</xref>). All participants were healthy, i.e., did not have any current medical diagnosis nor were taking any current psychotropic medications.</p>
</sec>
<sec id="S2.SS2">
<title>Sample size and power</title>
<p>Our participant sample size met criteria for investigating greater than small effect size outcomes across subjects (neurobehavioral correlation coefficient, <italic>r</italic> &#x003E; 0.1) at beta power of 0.8 and alpha of 0.05, as calculated using the G&#x002A;Power software (<xref ref-type="bibr" rid="B20">Faul et al., 2009</xref>).</p>
</sec>
<sec id="S2.SS3">
<title>Data acquisition</title>
<p>All participants made individual, single session study-visits at the Neural Engineering and Translational Labs (NEAT Labs) located at the Altman Clinical and Translational Research Institute at the University of California San Diego. Participants logged into the <italic>BrainE</italic> Unity platform (<xref ref-type="bibr" rid="B4">Balasubramani et al., 2021</xref>), and viewed the short film &#x201C;The Lion Cage&#x201D; by Charlie Chaplin that lasted 3.83 min. The video was delivered on a Windows-10 laptop at a comfortable viewing distance while recording EEG signals. We did not employ a scrambled video as control because the study goal was focused on investigating effects of anxiety and depression in processing emotional narratives, which requires the stimulus to stay intact. Although a scrambled stimulus may be beneficial to compare with ISC to real stimuli, previous research has also omitted this control (<xref ref-type="bibr" rid="B16">Dmochowski et al., 2014</xref>). The Lab Streaming Layer (LSL) protocol was used to time-stamp the beginning and end of the video clip (<xref ref-type="bibr" rid="B36">Kothe et al., 2019</xref>).</p>
<p>Electroencephalography data were collected using a 24-channel saline soaked cap following the 10&#x2013;20 system and a wireless SMARTING amplifier. The signals were digitized with a sampling rate of 500 Hz and 24-bit resolution and stored as .xdf files.</p>
</sec>
<sec id="S2.SS4">
<title>Stimulus</title>
<p>&#x201C;The Lion Cage&#x201D; depicts a man (Charlie Chaplin) in a circus getting accidentally locked in a cage with a sleeping lion and his efforts to escape without waking the lion. This film was chosen due to its emotional rollercoaster nature (scenes of high stress and anxiety mixed in with emotional relief), which we hypothesized would induce higher emotions in the subjects and show significant levels of ISC. Using a black and white silent film format also reduces confounding variables and puts more focus on the emotional narrative (<xref ref-type="bibr" rid="B60">Vandewalle et al., 2010</xref>; <xref ref-type="bibr" rid="B23">Gerdes et al., 2014</xref>). The entire film and subsequent data were parsed into 10 consecutive time windows to help isolate scenes of interest, with each window containing an interval of 23 s. This parsing was used for simple standardization (i.e., to have a standardized window of time) and was not related to aspects of the scene or video. The authors also independently coded these 10 windows with varying emotions and intensity scores on a 10-point scale to create an emotion key. After the ratings were proven reliable with a Cronbach&#x2019;s alpha value of 0.704, the authors met to resolve differences and came to a consensus on a single key for simplicity shown in <xref ref-type="table" rid="T2">Table 2</xref> (Cronbach, 1951).</p>
<table-wrap position="float" id="T1">
<label>TABLE 1</label>
<caption><p>Summary of participant demographics and mental health symptoms self-reported by healthy study subjects.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left" colspan="2">&#x2003;Demographics</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Age (years, mean &#x00B1; std)</td>
<td valign="top" align="center">39.80 &#x00B1; 22.65</td>
</tr>
<tr>
<td valign="top" align="left">Gender <italic>n</italic> (%)</td>
<td valign="top" align="center">&#x2013;</td>
</tr>
<tr>
<td valign="top" align="left">Male</td>
<td valign="top" align="center">59 (34.4%)</td>
</tr>
<tr>
<td valign="top" align="left">Female</td>
<td valign="top" align="center">103 (63.6%)</td>
</tr>
<tr>
<td valign="top" align="left">Ethnicity <italic>n</italic> (%)</td>
<td valign="top" align="center">&#x2013;</td>
</tr>
<tr>
<td valign="top" align="left">Caucasian</td>
<td valign="top" align="center">95 (58.6%)</td>
</tr>
<tr>
<td valign="top" align="left">Black/African American</td>
<td valign="top" align="center">3 (1.9%)</td>
</tr>
<tr>
<td valign="top" align="left">Asian</td>
<td valign="top" align="center">40 (24.7%)</td>
</tr>
<tr>
<td valign="top" align="left">Native American</td>
<td valign="top" align="center">2 (1.2%)</td>
</tr>
<tr>
<td valign="top" align="left">More than one ethnicity</td>
<td valign="top" align="center">14 (8.6%)</td>
</tr>
<tr>
<td valign="top" align="left">Other</td>
<td valign="top" align="center">8 (4.9%)</td>
</tr>
<tr>
<td valign="top" align="left">Anxiety (mean &#x00B1; std)</td>
<td valign="top" align="center">4.11 &#x00B1; 4.48</td>
</tr>
<tr>
<td valign="top" align="left">Depression (mean &#x00B1; std)</td>
<td valign="top" align="center">3.78 &#x00B1; 4.20</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn><p>Anxiety and depression were self-reported on the GAD7 and PHQ9 scales, respectively.</p></fn>
</table-wrap-foot>
</table-wrap>
<table-wrap position="float" id="T2">
<label>TABLE 2</label>
<caption><p>Description of the 10 scene segments and potential evoked emotional responses as keyed by the authors.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Window # (Seconds)</td>
<td valign="top" align="left">Scene</td>
<td valign="top" align="center">Evoked response</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">1 (0&#x2013;23)</td>
<td valign="top" align="left">Charlie Chaplin (CC) is in a cage with a sleeping lion</td>
<td valign="top" align="center">C(6.3), F(6)</td>
</tr>
<tr>
<td valign="top" align="left">2 (23&#x2013;46)</td>
<td valign="top" align="left">CC waves a handkerchief to try and get help</td>
<td valign="top" align="center">S(6)</td>
</tr>
<tr>
<td valign="top" align="left">3 (46&#x2013;69)</td>
<td valign="top" align="left">CC crawls through a hole where he is unknowingly close to a tiger</td>
<td valign="top" align="center">F(7), A(6)</td>
</tr>
<tr>
<td valign="top" align="left">4 (69&#x2013;92)</td>
<td valign="top" align="left">There is a dog jumping and barking at the cage with the sleeping lion</td>
<td valign="top" align="center">S(4.6), AG(4.3), F(3), H(3)</td>
</tr>
<tr>
<td valign="top" align="left">5 (92&#x2013;115)</td>
<td valign="top" align="left">A woman approaches the cage and subsequently passes out</td>
<td valign="top" align="center">AG(4), S(3), H(3)</td>
</tr>
<tr>
<td valign="top" align="left">6 (115&#x2013;138)</td>
<td valign="top" align="left">The lion wakes up and approaches CC</td>
<td valign="top" align="center">F(8)</td>
</tr>
<tr>
<td valign="top" align="left">7 (138&#x2013;161)</td>
<td valign="top" align="left">The lion walks away, rolls over onto his back, and goes to sleep</td>
<td valign="top" align="center">R(5.3), H(3.7), S(3)</td>
</tr>
<tr>
<td valign="top" align="left">8 (161&#x2013;184)</td>
<td valign="top" align="left">The woman wakes up and opens the cage door; CC showcases a sense of relief</td>
<td valign="top" align="center">R(8.7)</td>
</tr>
<tr>
<td valign="top" align="left">9 (184&#x2013;207)</td>
<td valign="top" align="left">CC runs out of the cage and up a flagpole to get away from the lion&#x2019;s cage</td>
<td valign="top" align="center">H(7.3), F(3.3), S(3), A(3)</td>
</tr>
<tr>
<td valign="top" align="left">10 (207&#x2013;230)</td>
<td valign="top" align="left">Film ends with CC taking a bow</td>
<td valign="top" align="center">R(6), H(5.3)</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn><p>Emotion key: F, fear; A, anxiety; S, Stress; R, relief; AG, anger; C, confusion; H, humor. The number in parentheses is the average intensity score given by the authors on a 10 point scale (Cronbach&#x2019;s alpha = 0.704).</p></fn>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="S2.SS5">
<title>Pre-processing of electroencephalography channel data</title>
<p>A standard pipeline was used for all subjects to clean the data through EEGLAB v 14.1.2 in MATLAB (<xref ref-type="bibr" rid="B12">Delorme and Makeig, 2004</xref>). Using EEGLAB, the data was resampled to 250 Hz and bandpass filtered between 1 and 45 Hz. This was achieved with the eegfiltnew() function which uses an 826 order Hamming windowed sinc FIR filter (<xref ref-type="bibr" rid="B62">Widmann et al., 2015</xref>). This removes DC drift at &#x003C;1 Hz and high frequency noise originating from muscle contractions or 60 Hz line noise. All channels used an average reference. The EEG data were then parsed to isolate the start and stop of the film while also removing any pauses during the film&#x2019;s playback from event markers generated by the LSL. Artifact rejection was performed automatically using the Sparse Bayesian Learning Algorithm (BSBL) (<xref ref-type="bibr" rid="B44">Ojeda et al., 2018</xref>, <xref ref-type="bibr" rid="B43">2021</xref>) to remove non-EEG-signals, i.e., signals of electrooculographic (EOG), electromyographic (EMG), or unknown source origin. Outlier rejection was done to further clean the data, excluding any data points that were greater than 5 standard deviations (5SD) above the average. The cleaned data was then filtered into three frequency bands, theta (4&#x2013;8 Hz), alpha (8&#x2013;12 Hz), and beta (15&#x2013;30 Hz) for individual analysis. Our experimental setup was not in a very low-noise/shielded environment to allow for gamma band analyses, hence, this frequency band was not included in the analyses.</p>
</sec>
<sec id="S2.SS6">
<title>Inter-subject correlation</title>
<p>To extract the instantaneous power for correlation analysis, we transformed each subject&#x2019;s channel data into Hilbert space, calculated the magnitude at each discrete time point (<xref ref-type="bibr" rid="B22">Freeman, 2004</xref>) and then standardized the data by <italic>z</italic>-scoring. Using the MATLAB function glmfit, we fit linear regression models for every pair of subjects using the 23 s of data within each of the time windows across the 24 electrodes and three frequency bands. Thus, for each electrode, time-window and frequency band we created a symmetric N &#x00D7; N matrix (&#x03B2;) where N is the number of subjects and &#x03B2;<sub><italic>ij</italic></sub> represents the beta value (slope) between subjects i and j. In other words, the &#x03B2; matrix is comprised of individual beta values for all pairwise combinations of subjects. These beta values are regression coefficients and thus not scaled between &#x00B1;1. <italic>P</italic>-values were calculated from the upper triangular elements of this matrix using a one-sample <italic>t</italic>-test across all unique intersubject &#x03B2;<sub><italic>ij</italic></sub> values for significance relative to the null hypothesis, with the null hypothesis being that the average inter-subject beta value would be 0 for all electrode, time window and frequency combinations indicating no correlation between any of the subjects (<xref ref-type="bibr" rid="B63">Wilson et al., 2008</xref>; <xref ref-type="bibr" rid="B31">Kauppi, 2010</xref>). A total of 100 iterations of permutation testing were also performed for the peak window to estimate the likelihood of mean beta-values above occurring from uncorrelated data. Each subject&#x2019;s neural activity was scrambled across time, followed by the ISC calculation as described above for each frequency and electrode. 
These 100 repeats served as our random distribution, and we calculated the percentile at which the real data sat relative to the random distribution. Permutation testing confirmed significance of the real beta values in the peak window at greater than 99th percentile of the permuted random distribution.</p>
<p>Average beta values for each electrode and time window were plotted on a heatmap masked with significance. Adjusted <italic>p</italic>-values from Benjamini-Hochberg false discovery rate (FDR) were used to resolve multiple comparisons across the 24 electrodes and 10 time windows. We identified the 8th time window (at 184&#x2013;207 s in the video) for further analysis as it showed the largest and most significant ISC values when averaged across electrodes for each frequency band. This was further confirmed by running a repeated measures ANOVA across the 10 time windows followed by Tukey&#x2019;s <italic>post-hoc</italic> test. Beta values were averaged across frequency and electrodes to find the overall peak window of interest. We chose to analyze the peak ISC because that would presumably be the timeframe of highest engagement across subjects as well as when healthy subjects are most &#x201C;in sync&#x201D; (<xref ref-type="bibr" rid="B17">Dmochowski et al., 2012</xref>; <xref ref-type="bibr" rid="B6">Cohen et al., 2017</xref>, <xref ref-type="bibr" rid="B7">2018</xref>). The ISC data for this time window was then plotted on a 2D scalp topography representation for better visualization with the topoplot.m function in MATLAB&#x2019;s EEGLAB toolbox. In addition to the ISC data, the average activity across subjects during the peak ISC window, i.e., 8th time window, was plotted in a similar manner to determine peak activity electrode clusters for further analysis. The topography plots revealed the midline centro-parietal region electrodes (Cz, CPz, Pz, POz) had highest ISC values as well as neural activity. Peak ISC and neural activity were compared across frequency bands using within-subjects repeated measures analysis of variance (rm-ANOVA) with the Greenhouse&#x2013;Geisser correction applied to adjust for lack of sphericity.</p>
</sec>
<sec id="S2.SS7">
<title>Source-localized analysis</title>
<p>We performed cortical source localization to map the underlying neural source activations using the block-sparse Bayesian learning (BSBL-2S) algorithm (<xref ref-type="bibr" rid="B44">Ojeda et al., 2018</xref>, <xref ref-type="bibr" rid="B43">2021</xref>). This is a two-step algorithm in which the first-step is equivalent to low-resolution electromagnetic tomography (LORETA) (<xref ref-type="bibr" rid="B45">Pascual-Marqui et al., 1994</xref>). LORETA estimates sources subject to smoothness constraints, i.e., nearby sources tend to be co-activated, which may produce source estimates with a high number of false positives that are not biologically plausible. To guard against this, BSBL-2S applies sparsity constraints in the second step wherein blocks of irrelevant sources are pruned. This data-driven sparsity constraint of the Sparse Bayesian Learning (SBL) method reduces the effective number of sources considered at any given time as a solution, thereby reducing the ill-posed nature of the inverse mapping (<xref ref-type="bibr" rid="B44">Ojeda et al., 2018</xref>, <xref ref-type="bibr" rid="B43">2021</xref>). In other words, one can either increase the number of channels used to solve the ill-posed inverse problem or impose more aggressive constraints on the solution to converge on the source model when channel density is low/moderate; 24 channels in this case. The two-stage SBL has been benchmarked to produce evidence-optimized inverse source models at 0.95 AUC relative to the ground truth, while without the second stage &#x003C; 0.9 AUC is obtained (<xref ref-type="bibr" rid="B44">Ojeda et al., 2018</xref>, <xref ref-type="bibr" rid="B43">2021</xref>). 
Prior research also provides support that sparse source imaging constraints can be soundly applied to low channel density data (<xref ref-type="bibr" rid="B15">Ding and He, 2008</xref>; <xref ref-type="bibr" rid="B57">Stopczynski et al., 2014</xref>), and we have also shown that cortical source mapping with this method has high test-retest reliability with Cronbach&#x2019;s alpha of 0.77 (<xref ref-type="bibr" rid="B4">Balasubramani et al., 2021</xref>).</p>
<p>Prior to source analysis, EEG data were specifically filtered in theta (4&#x2013;8 Hz), alpha (8&#x2013;12 Hz), and beta (15&#x2013;30 Hz) bands and separately source localized in each of the three frequency bands to estimate their cortical ROI source signals. Using BSBL-2S, the 24-channel frequency band specific data were mapped onto 68 cortical brain regions as defined by the Desikan-Killiany (DK) atlas (<xref ref-type="bibr" rid="B13">Desikan et al., 2006</xref>) with the Colin-27 head model (<xref ref-type="bibr" rid="B29">Holmes et al., 1998</xref>). For this, the source model included 8,003 dipoles that were then parcellated into the DK atlas 68 cortical regions by averaging the magnitudes of the dipole sources in the same cortical region (<xref ref-type="bibr" rid="B44">Ojeda et al., 2018</xref>, <xref ref-type="bibr" rid="B43">2021</xref>). The signal envelope was calculated to obtain the source spectral amplitude of each brain region across time in each subject. Artifacts from external factors such as jaw or eye movement that may remain in source space require an alternate approach for outlier rejection beyond 5 SD; for these, we employed the Grubbs statistical test to iteratively remove outliers and replace them using a spline interpolation&#x2014;an option available within the MATLAB isoutlier function. ISC was performed on the cleaned subject source data in all ROIs and the average ISC across subjects in peak time window 8 was plotted on cortical brain maps in the three frequency bands with one-sample <italic>t</italic>-test significance testing of &#x03B2; coefficients performed relative to null; <italic>p</italic>-values were FDR corrected across 68 brain regions and three frequency bands.</p>
</sec>
<sec id="S2.SS8">
<title>Neurobehavioral correlations with mental health</title>
<p>We used Spearman&#x2019;s correlation to investigate associations between each subject&#x2019;s anxiety (GAD7)/depression (PHQ9) score and their average ISC value within the peak window and electrode cluster identified earlier. Spearman&#x2019;s correlation analyses were used as these are less sensitive (though not completely insensitive) to outlier effects (<xref ref-type="bibr" rid="B54">Rousselet and Pernet, 2012</xref>). Results were FDR <italic>p</italic> &#x003C; 0.05 corrected for multiple comparisons. Spearman&#x2019;s rho (&#x03C1;) values indicated effect size: 0.1: small, 0.3: medium, 0.5: large effect size. Age and gender were also tested as covariates using Spearman&#x2019;s partial correlations.</p>
</sec>
</sec>
<sec id="S3" sec-type="results">
<title>Results</title>
<p><xref ref-type="table" rid="T1">Table 1</xref> shows subject demographic and mental health (anxiety and depression) symptom report data that was available for 162 of 163 human subject participants. The sample had about a 2:1 ratio of females to males and a Caucasian majority. Subjects with anxiety/depression scores &#x003E; 5 are considered to have mild symptoms; there were 28.75 and 22.01% of subjects with mild anxiety and mild depression symptoms, respectively.</p>
<p><xref ref-type="fig" rid="F1">Figure 1</xref> shows brief screenshots of the short film where each screenshot is meant to capture the scene of each particular 23 s time window. Below it are heatmaps of the ISC values across the film&#x2019;s 10 time windows, electrodes, and frequency bands. Repeated measures ANOVA resulted in <italic>F</italic>(9,1.2E5) = 32.9, <italic>p</italic> &#x003C; 1.3E-36 after Greenhouse&#x2013;Geisser correction for sphericity. <italic>Post-hoc</italic> Tukey&#x2019;s test revealed that peak window 8 was significantly different than all other windows (<italic>p</italic> &#x003C; 0.0002). <xref ref-type="table" rid="T2">Table 2</xref> provides a scene description by time window, and the potential emotion that may be evoked when watching the scene.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p><bold>(A)</bold> Screenshots representative of 10 time windows from the film. The red border indicates the peak 8th time window. <bold>(B)</bold> Heat map of the inter-subject correlation (ISC) values per electrode and time window across three frequency bands; peak ISC was observed in the 8th time window. Cells with &#x201C;+&#x201D; indicate FDR-corrected <italic>p</italic> &#x003C; 0.05 significance relative to null.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-993606-g001.tif"/>
</fig>
<p>From the ISC heatmaps, we observed that the highest average inter-subject correlation across electrodes was found during time window 8 (highlighted in red in <xref ref-type="fig" rid="F1">Figure 1</xref> scene panel), which coincided with the permanent onset of emotional relief. Almost all electrodes showed significant correlation across time, <italic>p</italic> &#x003C; 0.05 FDR-corrected. FDR-correction was performed across the 24 electrodes and 10 time windows, i.e., correcting for 240 statistical tests, separately for each frequency band. For all further analyses, we focused on the peak ISC time window 8 which had the highest relief rating by all authors across the 10 scenes.</p>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> shows the scalp topography of the peak time window 8 ISC, showing clear focality in the midline centro-parietal electrodes in all theta, alpha and beta frequency bands. The B panel shows the corresponding average neural activity during the same time window, which interestingly showed lateral but not central maxima, suggesting ISC magnitude was not a simple function of activity magnitude. The theta frequency band appeared by eye to have the highest magnitude ISC values compared to the other frequency bands; hence, we statistically analyzed this. The bar graphs in the right panel quantify and compare the magnitude of ISC (A) and activity (B) across frequency bands in the peak (Cz, CPz, Pz, and POz) electrodes (within-subjects rm-ANOVA across frequency bands, ISC: <italic>F</italic>(2, 46) = 45.42, <italic>p</italic> = 1.95E-7, average activity: <italic>F</italic>(2, 46) = 22.52, <italic>p</italic> = 4.1E-5). ISC was largest for the theta frequency band compared to both other frequencies, and alpha showed higher ISC than beta frequency bands.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Scalp topography maps of peak ISC window 8 across three frequency bands. <bold>(A)</bold> ISC values and <bold>(B)</bold> average neural activity on the scalp. The bar graph panel on the right plots the mean &#x00B1; std peak time window ISC and neural activity values at midline centro-parietal electrodes across the three frequency bands. &#x002A;&#x002A;&#x002A;&#x002A;<italic>p</italic> &#x003C; 0.001.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-993606-g002.tif"/>
</fig>
<p><xref ref-type="fig" rid="F3">Figure 3</xref> shows the neurobehavioral correlations between the peak ISC (within peak time window eight and peak midline centro-parietal channels) and subjects&#x2019; mental health scores. All correlations were FDR-corrected for multiple comparisons. Neurobehavioral correlations with anxiety in the alpha and beta band showed a significant inverse relationship [alpha band rho (&#x03C1;) = &#x2013;0.244, <italic>p</italic> = 0.012; beta band &#x03C1; = &#x2013;0.221, <italic>p</italic> = 0.015]; correlation with anxiety in the theta band was not significant (theta band &#x03C1; = &#x2013;0.115, <italic>p</italic> = 0.179). Neurobehavioral correlations were anxiety specific and did not achieve FDR-corrected significance with depression scores in our healthy sample (<italic>p</italic> &#x003E; 0.05). Age and gender were not significant covariates of these ISC-anxiety neurobehavioral relationships (Spearman partial correlations, alpha ISC vs. age &#x03C1; = &#x2013;0.073, <italic>p</italic> = 0.361; vs. gender &#x03C1; = &#x2013;0.123, <italic>p</italic> = 0.123; beta ISC vs. age &#x03C1; = 0.017, <italic>p</italic> = 0.831; vs. gender &#x03C1; = &#x2013;0.129, <italic>p</italic> = 0.105; although age and anxiety were significantly inversely related in these partial correlations: &#x03C1; = &#x2013;0.33, <italic>p</italic> &#x003C; 0.0001, anxiety and gender were not: &#x03C1; = 0.08, <italic>p</italic> = 0.3).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>Neurobehavioral correlations between peak ISC and mental health symptoms of anxiety and depression, across the three frequency bands.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-993606-g003.tif"/>
</fig>
<p>Finally, <xref ref-type="fig" rid="F4">Figure 4</xref> shows the EEG cortical source reconstruction of the peak window ISC data, mapped onto the 68 ROI brain regions as per the Desikan-Killiany atlas. Similar to <xref ref-type="fig" rid="F3">Figure 3</xref>, theta band ISC had larger magnitude values than alpha and beta band with greatest intensity in the right superior parietal cortex across all bands.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption><p>Significant brain regions of ISC during the peak time window across the three frequency bands.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-993606-g004.tif"/>
</fig>
</sec>
<sec id="S4" sec-type="discussion">
<title>Discussion</title>
<p>Inter-subject correlation of physiological data can provide useful proof of how similarly humans process stimulus information (<xref ref-type="bibr" rid="B28">Hasson et al., 2004</xref>). Several neuroimaging studies using fMRI during video watching have shown evidence of ISC in sensory, multisensory and limbic brain regions (<xref ref-type="bibr" rid="B30">J&#x00E4;&#x00E4;skel&#x00E4;inen et al., 2008</xref>; <xref ref-type="bibr" rid="B42">Nummenmaa et al., 2012</xref>; <xref ref-type="bibr" rid="B14">Di and Biswal, 2020</xref>). Recent studies have utilized EEG for generating ISC during classroom video watching (<xref ref-type="bibr" rid="B50">Poulsen et al., 2017</xref>) and to detect stress/anxiety states from EEG features during video watching (<xref ref-type="bibr" rid="B24">Giannakakis et al., 2015</xref>). Here, we extend this literature to ask whether ISC during naturalistic video watching especially in a large subject sample relates to mental health. We find that ISC, as a physiological marker of inter-subject neural processing similarity during passive viewing, has a significant negative relationship with anxiety, showing stronger correlation in subjects without anxiety than with anxiety.</p>
<p>We observed that peak ISC was obtained in EEG signals when passively viewing a particular scene segment (161&#x2013;184 s) in a video clip of Charlie Chaplin&#x2019;s (CC) film &#x201C;Lion Cage&#x201D; that was 230 s in total duration. This 8th time window is observed to coincide with the possible onset of strongest emotional relief in the film when CC is let out of the dangerous lion&#x2019;s cage. This period contrasts to the rest of the film up to this point, potentially transitioning viewers to a more relaxed state of mind. As previous research has suggested, we would expect to see high ISC with low variation across subjects as they all go through the same emotional response (<xref ref-type="bibr" rid="B30">J&#x00E4;&#x00E4;skel&#x00E4;inen et al., 2008</xref>). Interestingly, the magnitude of peak ISC in the alpha/beta frequency bands at the centro-parietal electrodes was inversely related to subjects&#x2019; self-reported anxiety, i.e., individuals reporting greater anxiety had lower ISC scores. Greater activity in the alpha and beta bands has been related to anxiety and stress (<xref ref-type="bibr" rid="B35">Knyazev et al., 2004</xref>; <xref ref-type="bibr" rid="B51">Price and Budzynshi, 2009</xref>; <xref ref-type="bibr" rid="B1">Abhang et al., 2016</xref>). More pertinently, it is possible that individuals with higher levels of anxiety are less distinct during this epoch when, in general, there is a sense of &#x201C;relief.&#x201D; Higher theta activity is also expected with the presentation of visual stimuli as well as high facial perception, which is also observed in our analysis (<xref ref-type="bibr" rid="B5">Busch et al., 2009</xref>; <xref ref-type="bibr" rid="B19">Dravida et al., 2019</xref>). We further verified that these neurobehavioral correlations were not affected by age or gender. 
We also confirmed that this peak ISC localized to the superior parietal brain region, which is a well-known component of the attentional network (<xref ref-type="bibr" rid="B9">Corbetta and Shulman, 2002</xref>; <xref ref-type="bibr" rid="B18">Dosenbach et al., 2007</xref>). Meta-analyses have identified this region as important for attention and emotion, coding for emotional dysregulation especially in mood and anxiety disorders (<xref ref-type="bibr" rid="B8">Compton et al., 2003</xref>; <xref ref-type="bibr" rid="B47">Pic&#x00F3;-P&#x00E9;rez et al., 2017</xref>). ISC-anxiety correlations being most significant in the alpha and beta bands suggests local region-specific modulation selective for attention and emotion processing (<xref ref-type="bibr" rid="B52">Ray and Cole, 1985</xref>; <xref ref-type="bibr" rid="B34">Klimesch et al., 1998</xref>; <xref ref-type="bibr" rid="B55">Schutter et al., 2001</xref>; <xref ref-type="bibr" rid="B25">G&#x00FC;ntekin and Basar, 2007</xref>; <xref ref-type="bibr" rid="B26">G&#x00FC;ntekin and Ba&#x015F;ar, 2010</xref>; <xref ref-type="bibr" rid="B46">Peylo et al., 2021</xref>). Alpha activity in parietal cortex has been postulated to select visual information for attentive processing (<xref ref-type="bibr" rid="B46">Peylo et al., 2021</xref>), and the parietal alpha ISC-anxiety correlations may suggest that individuals with greater anxiety may not process relief, the positive emotion in the peak time window, as readily as those with lower anxiety scores.</p>
<p>Limitations of this research include use of a moderate density electrode system, which may lead to more approximate source localization; these results could be verified by high density EEG mapping or other neuroimaging in future research. Furthermore, the lack of having multiple videos (and/or scrambled videos) as a control stimulus, may complicate the interpretation of our results. With regards to neural signal processing, we used a bandpass filtering approach to extract frequency band information. However, as noted in <xref ref-type="bibr" rid="B62">Widmann et al. (2015)</xref>, separate low and high pass filtering may be better for attenuating the signals below the cutoff frequency particularly for the high-pass filters. Thus, it is possible that our data within theta, alpha and beta frequencies have some degree of contamination from outside the filter band. This contamination effect may to some extent explain why we observe similar findings in the alpha and beta frequencies, in particular, similar correlations with anxiety. Importantly, though, the theta and alpha frequency bands are both narrower and closer together and thus more likely to show cross-band contamination, and we found that, while alpha-frequency electrodes significantly correlated with symptoms such as for depression (<italic>r</italic> = &#x2013;0.177), theta frequency band signals did not (<italic>r</italic> = 0.003), suggesting some frequency band specificity. An additional limitation of this study is that our symptom correlation analysis relied purely on subjective reporting on symptom scales of anxiety and depression. 
We did not have a systematic way to interrogate or otherwise clinically verify whether these scores were an accurate depiction of an individual&#x2019;s actual level of anxiety or depression, although notably the prevalence of symptoms in this study reflects the prevalence of anxiety/depression in a mixed community and college sample (<xref ref-type="bibr" rid="B32">Kessler et al., 2012</xref>; <xref ref-type="bibr" rid="B38">Li et al., 2022</xref>). That symptom scales may have been misreported, or there may simply have been some misunderstanding as individuals were responding, is a small possibility. However, we believe this type of misreporting/error would have been more likely to reduce rather than increase any neurobehavioral correlations observed, and is a general challenge with many neurobehavioral correlation studies. As such, results also need to be extended to individuals with clinically diagnosed anxiety to investigate if these relationships hold true in more anxious individuals in a clinical population.</p>
<p>In conclusion, the neurophysiological ISC measures may parallel the mirrored human emotions elicited during video watching. Being able to properly understand and feel similar emotions as others around us, i.e., empathy, is an important trait possessed by healthy humans. Yet, research has shown that individuals with anxiety, especially social anxiety, have difficulty understanding positive emotions of others as readily as healthy controls (<xref ref-type="bibr" rid="B41">Morrison et al., 2016</xref>). Other research suggests that empathy may be intact in social anxiety but that there may be deficits in prosocial action (<xref ref-type="bibr" rid="B2">Auyeung and Alden, 2016</xref>, <xref ref-type="bibr" rid="B3">2020</xref>), and that links between anxiety and empathy are not fully clear (<xref ref-type="bibr" rid="B48">Pittelkow et al., 2021</xref>). Our physiological analysis suggests that aspects of parietal attention, especially during a period of positive emotion, i.e., relief from prior stress during emotional video viewing, may not occur as readily in more anxious individuals. Furthermore, we show that these neurobehavioral relationships hold true for continuous data that is not event-locked to specific stimuli.</p>
</sec>
<sec id="S5" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="S6">
<title>Ethics statement</title>
<p>The studies involving human participants were reviewed and approved by the University of California San Diego IRB. The patients/participants provided their written informed consent to participate in this study.</p>
</sec>
<sec id="S7">
<title>Author contributions</title>
<p>PB, DR, and JM contributed to the conception and design of the study. JN performed the statistical analysis and wrote the first draft of the manuscript. All authors contributed to the manuscript revision, read, and approved the submitted version.</p>
</sec>
</body>
<back>
<sec id="S8" sec-type="funding-information">
<title>Funding</title>
<p>This work was supported by University of California San Diego (UCSD) start-up funds (JM), and a grant from the Sanford Institute for Empathy and Compassion at UCSD (JM).</p>
</sec>
<ack>
<p>We thank Alankar Misra for software development of the <italic>BrainE</italic> software and several UCSD undergraduate students who assisted with data collection. The <italic>BrainE</italic> software is copyrighted for commercial use (Regents of the University of California Copyright #SD2018-816) and free for research and educational purposes.</p>
</ack>
<sec id="S9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="S10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abhang</surname> <given-names>P. A.</given-names></name> <name><surname>Gawali</surname> <given-names>B. W.</given-names></name> <name><surname>Mehrotra</surname> <given-names>S. C.</given-names></name></person-group> (<year>2016</year>). &#x201C;<article-title>Technical Aspects of Brain Rhythms and Speech Parameters</article-title>,&#x201D; in <source><italic>Introduction to EEG- and speech-based emotion recognition (Elsevier)</italic></source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Abhang</surname> <given-names>P. A.</given-names></name> <name><surname>Gawali</surname> <given-names>B. W.</given-names></name> <name><surname>Mehrotra</surname> <given-names>S. C.</given-names></name></person-group> (<publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>Academic Press</publisher-name>), <fpage>51</fpage>&#x2013;<lpage>79</lpage>.</citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Auyeung</surname> <given-names>K. W.</given-names></name> <name><surname>Alden</surname> <given-names>L. E.</given-names></name></person-group> (<year>2016</year>). <article-title>Social anxiety and empathy for social pain.</article-title> <source><italic>Cognit. Ther. Res.</italic></source> <volume>40</volume> <fpage>38</fpage>&#x2013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1007/s10608-015-9718-0</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Auyeung</surname> <given-names>K.</given-names></name> <name><surname>Alden</surname> <given-names>L. E.</given-names></name></person-group> (<year>2020</year>). <article-title>Accurate empathy, social rejection, and social anxiety disorder.</article-title> <source><italic>Clin. Psychol. Sci.</italic></source> <volume>8</volume> <fpage>266</fpage>&#x2013;<lpage>279</lpage>. <pub-id pub-id-type="doi">10.1177/2167702619885410</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Balasubramani</surname> <given-names>P. P.</given-names></name> <name><surname>Ojeda</surname> <given-names>A.</given-names></name> <name><surname>Grennan</surname> <given-names>G.</given-names></name> <name><surname>Maric</surname> <given-names>V.</given-names></name> <name><surname>Le</surname> <given-names>H.</given-names></name> <name><surname>Alim</surname> <given-names>F.</given-names></name><etal/></person-group> (<year>2021</year>). <article-title>Mapping cognitive brain functions at scale.</article-title> <source><italic>Neuroimage</italic></source> <volume>231</volume>:<issue>117641</issue>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.117641</pub-id> <pub-id pub-id-type="pmid">33338609</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Busch</surname> <given-names>N. A.</given-names></name> <name><surname>Dubois</surname> <given-names>J.</given-names></name> <name><surname>VanRullen</surname> <given-names>R.</given-names></name></person-group> (<year>2009</year>). <article-title>The phase of ongoing EEG Oscillations predicts visual perception.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>29</volume> <fpage>7869</fpage>&#x2013;<lpage>7876</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.0113-09.2009</pub-id> <pub-id pub-id-type="pmid">19535598</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cohen</surname> <given-names>S. S.</given-names></name> <name><surname>Henin</surname> <given-names>S.</given-names></name> <name><surname>Parra</surname> <given-names>L. C.</given-names></name></person-group> (<year>2017</year>). <article-title>Engaging narratives evoke similar neural activity and lead to similar time perception.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>7</volume>:<issue>4578</issue>. <pub-id pub-id-type="doi">10.1038/s41598-017-04402-4</pub-id> <pub-id pub-id-type="pmid">28676688</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cohen</surname> <given-names>S. S.</given-names></name> <name><surname>Madsen</surname> <given-names>J.</given-names></name> <name><surname>Touchan</surname> <given-names>G.</given-names></name> <name><surname>Robles</surname> <given-names>D.</given-names></name> <name><surname>Lima</surname> <given-names>S. F. A.</given-names></name> <name><surname>Henin</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Neural engagement with online educational videos predicts learning performance for individual students.</article-title> <source><italic>Neurobiol. Learn. Mem.</italic></source> <volume>155</volume> <fpage>60</fpage>&#x2013;<lpage>64</lpage>. <pub-id pub-id-type="doi">10.1016/j.nlm.2018.06.011</pub-id> <pub-id pub-id-type="pmid">29953947</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Compton</surname> <given-names>R. J.</given-names></name> <name><surname>Banich</surname> <given-names>M. T.</given-names></name> <name><surname>Mohanty</surname> <given-names>A.</given-names></name> <name><surname>Milham</surname> <given-names>M. P.</given-names></name> <name><surname>Herrington</surname> <given-names>J.</given-names></name> <name><surname>Miller</surname> <given-names>G. A.</given-names></name><etal/></person-group> (<year>2003</year>). <article-title>Paying attention to emotion: An fMRI investigation of cognitive and emotional Stroop tasks.</article-title> <source><italic>Cogn. Affect. Behav. Neurosci.</italic></source> <volume>3</volume> <fpage>81</fpage>&#x2013;<lpage>96</lpage>. <pub-id pub-id-type="doi">10.3758/CABN.3.2.81</pub-id> <pub-id pub-id-type="pmid">12943324</pub-id></citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Corbetta</surname> <given-names>M.</given-names></name> <name><surname>Shulman</surname> <given-names>G. L.</given-names></name></person-group> (<year>2002</year>). <article-title>Control of goal-directed and stimulus-driven attention in the brain.</article-title> <source><italic>Nat. Rev. Neurosci.</italic></source> <volume>3</volume> <fpage>201</fpage>&#x2013;<lpage>215</lpage>. <pub-id pub-id-type="doi">10.1038/nrn755</pub-id> <pub-id pub-id-type="pmid">11994752</pub-id></citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cronbach</surname> <given-names>L. J.</given-names></name></person-group> (<year>1951</year>). <article-title>Coefficient alpha and the internal structure of tests.</article-title> <source><italic>Psychometrika</italic></source> <volume>16</volume> <fpage>297</fpage>&#x2013;<lpage>334</lpage>. <pub-id pub-id-type="doi">10.1007/BF02310555</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dauer</surname> <given-names>T.</given-names></name> <name><surname>Nguyen</surname> <given-names>D. T.</given-names></name> <name><surname>Gang</surname> <given-names>N.</given-names></name> <name><surname>Dmochowski</surname> <given-names>J. P.</given-names></name> <name><surname>Berger</surname> <given-names>J.</given-names></name> <name><surname>Kaneshiro</surname> <given-names>B.</given-names></name></person-group> (<year>2021</year>). <article-title>Inter-subject correlation while listening to minimalist music: A study of electrophysiological and behavioral responses to steve reich&#x2019;s piano phase.</article-title> <source><italic>Front. Neurosci.</italic></source> <volume>15</volume>:<issue>702067</issue>. <pub-id pub-id-type="doi">10.3389/fnins.2021.702067</pub-id> <pub-id pub-id-type="pmid">34955706</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Delorme</surname> <given-names>A.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>EEGLAB: An open source toolbox for analysis of single-trial EEG dynamics including independent component analysis.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>134</volume> <fpage>9</fpage>&#x2013;<lpage>21</lpage>. <pub-id pub-id-type="doi">10.1016/j.jneumeth.2003.10.009</pub-id> <pub-id pub-id-type="pmid">15102499</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Desikan</surname> <given-names>R. S.</given-names></name> <name><surname>S&#x00E9;gonne</surname> <given-names>F.</given-names></name> <name><surname>Fischl</surname> <given-names>B.</given-names></name> <name><surname>Quinn</surname> <given-names>B. T.</given-names></name> <name><surname>Dickerson</surname> <given-names>B. C.</given-names></name> <name><surname>Blacker</surname> <given-names>D.</given-names></name><etal/></person-group> (<year>2006</year>). <article-title>An automated labeling system for subdividing the human cerebral cortex on MRI scans into gyral based regions of interest.</article-title> <source><italic>Neuroimage</italic></source> <volume>31</volume> <fpage>968</fpage>&#x2013;<lpage>980</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2006.01.021</pub-id> <pub-id pub-id-type="pmid">16530430</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Di</surname> <given-names>X.</given-names></name> <name><surname>Biswal</surname> <given-names>B. B.</given-names></name></person-group> (<year>2020</year>). <article-title>Intersubject consistent dynamic connectivity during natural vision revealed by functional MRI.</article-title> <source><italic>Neuroimage</italic></source> <volume>216</volume>:<issue>116698</issue>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.116698</pub-id> <pub-id pub-id-type="pmid">32130972</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>L.</given-names></name> <name><surname>He</surname> <given-names>B.</given-names></name></person-group> (<year>2008</year>). <article-title>Sparse source imaging in electroencephalography with accurate field modeling.</article-title> <source><italic>Hum. Brain Mapp.</italic></source> <volume>29</volume> <fpage>1053</fpage>&#x2013;<lpage>1067</lpage>. <pub-id pub-id-type="doi">10.1002/hbm.20448</pub-id> <pub-id pub-id-type="pmid">17894400</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dmochowski</surname> <given-names>J. P.</given-names></name> <name><surname>Bezdek</surname> <given-names>M. A.</given-names></name> <name><surname>Abelson</surname> <given-names>B. P.</given-names></name> <name><surname>Johnson</surname> <given-names>J. S.</given-names></name> <name><surname>Schumacher</surname> <given-names>E. H.</given-names></name> <name><surname>Parra</surname> <given-names>L. C.</given-names></name></person-group> (<year>2014</year>). <article-title>Audience preferences are predicted by temporal reliability of neural processing.</article-title> <source><italic>Nat. Commun.</italic></source> <volume>5</volume>:<issue>4567</issue>. <pub-id pub-id-type="doi">10.1038/ncomms5567</pub-id> <pub-id pub-id-type="pmid">25072833</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dmochowski</surname> <given-names>J. P.</given-names></name> <name><surname>Sajda</surname> <given-names>P.</given-names></name> <name><surname>Dias</surname> <given-names>J.</given-names></name> <name><surname>Parra</surname> <given-names>L. C.</given-names></name></person-group> (<year>2012</year>). <article-title>Correlated components of ongoing EEG point to emotionally laden attention &#x2013; a possible marker of engagement?</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>6</volume>:<issue>112</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2012.00112</pub-id> <pub-id pub-id-type="pmid">22623915</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dosenbach</surname> <given-names>N. U. F.</given-names></name> <name><surname>Fair</surname> <given-names>D. A.</given-names></name> <name><surname>Miezin</surname> <given-names>F. M.</given-names></name> <name><surname>Cohen</surname> <given-names>A. L.</given-names></name> <name><surname>Wenger</surname> <given-names>K. K.</given-names></name> <name><surname>Dosenbach</surname> <given-names>R. A. T.</given-names></name><etal/></person-group> (<year>2007</year>). <article-title>Distinct brain networks for adaptive and stable task control in humans.</article-title> <source><italic>Proc. Natl. Acad. Sci.</italic></source> <volume>104</volume> <fpage>11073</fpage>&#x2013;<lpage>11078</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0704320104</pub-id> <pub-id pub-id-type="pmid">17576922</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dravida</surname> <given-names>S.</given-names></name> <name><surname>Ono</surname> <given-names>Y.</given-names></name> <name><surname>Noah</surname> <given-names>J. A.</given-names></name> <name><surname>Zhang</surname> <given-names>X. Z.</given-names></name> <name><surname>Hirsch</surname> <given-names>J.</given-names></name></person-group> (<year>2019</year>). <article-title>Co-localization of theta-band activity and hemodynamic responses during face perception: Simultaneous electroencephalography and functional near-infrared spectroscopy recordings.</article-title> <source><italic>Neurophotonics</italic></source> <volume>6</volume>:<issue>045002</issue>. <pub-id pub-id-type="doi">10.1117/1.NPh.6.4.045002</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Faul</surname> <given-names>F.</given-names></name> <name><surname>Erdfelder</surname> <given-names>E.</given-names></name> <name><surname>Buchner</surname> <given-names>A.</given-names></name> <name><surname>Lang</surname> <given-names>A.-G.</given-names></name></person-group> (<year>2009</year>). <article-title>Statistical power analyses using G&#x002A;Power 3.1: Tests for correlation and regression analyses.</article-title> <source><italic>Behav. Res. Methods</italic></source> <volume>41</volume> <fpage>1149</fpage>&#x2013;<lpage>1160</lpage>. <pub-id pub-id-type="doi">10.3758/BRM.41.4.1149</pub-id> <pub-id pub-id-type="pmid">19897823</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Feng</surname> <given-names>C.</given-names></name> <name><surname>Thompson</surname> <given-names>W. K.</given-names></name> <name><surname>Paulus</surname> <given-names>M. P.</given-names></name></person-group> (<year>2022</year>). <article-title>Effect sizes of associations between neuroimaging measures and affective symptoms: A meta-analysis.</article-title> <source><italic>Depress. Anxiety</italic></source> <volume>39</volume> <fpage>19</fpage>&#x2013;<lpage>25</lpage>. <pub-id pub-id-type="doi">10.1002/da.23215</pub-id> <pub-id pub-id-type="pmid">34516701</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Freeman</surname> <given-names>W. J.</given-names></name></person-group> (<year>2004</year>). <article-title>Origin, structure, and role of background EEG activity. Part 1. Analytic amplitude.</article-title> <source><italic>Clin. Neurophysiol.</italic></source> <volume>115</volume> <fpage>2077</fpage>&#x2013;<lpage>2088</lpage>. <pub-id pub-id-type="doi">10.1016/j.clinph.2004.02.029</pub-id> <pub-id pub-id-type="pmid">15294210</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gerdes</surname> <given-names>A. B. M.</given-names></name> <name><surname>Wieser</surname> <given-names>M. J.</given-names></name> <name><surname>Alpers</surname> <given-names>G. W.</given-names></name></person-group> (<year>2014</year>). <article-title>Emotional pictures and sounds: A review of multimodal interactions of emotion cues in multiple domains.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>5</volume>:<issue>1351</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.01351</pub-id> <pub-id pub-id-type="pmid">25520679</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Giannakakis</surname> <given-names>G.</given-names></name> <name><surname>Grigoriadis</surname> <given-names>D.</given-names></name> <name><surname>Tsiknakis</surname> <given-names>M.</given-names></name></person-group> (<year>2015</year>). &#x201C;<article-title>Detection of stress/anxiety state from EEG features during video watching</article-title>,&#x201D; in <source><italic>Proceedings of the 2015 37th annual international conference of the IEEE Engineering in medicine and biology society (EMBC)</italic></source>, (<publisher-loc>Piscataway, NJ</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>6034</fpage>&#x2013;<lpage>6037</lpage>. <pub-id pub-id-type="doi">10.1109/EMBC.2015.7319767</pub-id> <pub-id pub-id-type="pmid">26737667</pub-id></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>G&#x00FC;ntekin</surname> <given-names>B.</given-names></name> <name><surname>Basar</surname> <given-names>E.</given-names></name></person-group> (<year>2007</year>). <article-title>Emotional face expressions are differentiated with brain oscillations.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>64</volume> <fpage>91</fpage>&#x2013;<lpage>100</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2006.07.003</pub-id> <pub-id pub-id-type="pmid">17156875</pub-id></citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>G&#x00FC;ntekin</surname> <given-names>B.</given-names></name> <name><surname>Ba&#x015F;ar</surname> <given-names>E.</given-names></name></person-group> (<year>2010</year>). <article-title>Event-related beta oscillations are affected by emotional eliciting stimuli.</article-title> <source><italic>Neurosci. Lett.</italic></source> <volume>483</volume> <fpage>173</fpage>&#x2013;<lpage>178</lpage>. <pub-id pub-id-type="doi">10.1016/j.neulet.2010.08.002</pub-id> <pub-id pub-id-type="pmid">20692324</pub-id></citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Guo</surname> <given-names>C. C.</given-names></name> <name><surname>Nguyen</surname> <given-names>V. T.</given-names></name> <name><surname>Hyett</surname> <given-names>M. P.</given-names></name> <name><surname>Parker</surname> <given-names>G. B.</given-names></name> <name><surname>Breakspear</surname> <given-names>M. J.</given-names></name></person-group> (<year>2015</year>). <article-title>Out-of-sync: Disrupted neural activity in emotional circuitry during film viewing in melancholic depression.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>5</volume>:<issue>11605</issue>. <pub-id pub-id-type="doi">10.1038/srep11605</pub-id> <pub-id pub-id-type="pmid">26112251</pub-id></citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hasson</surname> <given-names>U.</given-names></name> <name><surname>Nir</surname> <given-names>Y.</given-names></name> <name><surname>Levy</surname> <given-names>I.</given-names></name> <name><surname>Fuhrmann</surname> <given-names>G.</given-names></name> <name><surname>Malach</surname> <given-names>R.</given-names></name></person-group> (<year>2004</year>). <article-title>Intersubject synchronization of cortical activity during natural vision.</article-title> <source><italic>Science</italic></source> <volume>303</volume> <fpage>1634</fpage>&#x2013;<lpage>1640</lpage>. <pub-id pub-id-type="doi">10.1126/science.1089506</pub-id> <pub-id pub-id-type="pmid">15016991</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Holmes</surname> <given-names>C. J.</given-names></name> <name><surname>Hoge</surname> <given-names>R.</given-names></name> <name><surname>Collins</surname> <given-names>L.</given-names></name> <name><surname>Woods</surname> <given-names>R.</given-names></name> <name><surname>Toga</surname> <given-names>A. W.</given-names></name> <name><surname>Evans</surname> <given-names>A. C.</given-names></name></person-group> (<year>1998</year>). <article-title>Enhancement of MR images using registration for signal averaging.</article-title> <source><italic>J. Comput. Assist. Tomogr.</italic></source> <volume>22</volume> <fpage>324</fpage>&#x2013;<lpage>333</lpage>. <pub-id pub-id-type="doi">10.1097/00004728-199803000-00032</pub-id> <pub-id pub-id-type="pmid">9530404</pub-id></citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>J&#x00E4;&#x00E4;skel&#x00E4;inen</surname> <given-names>I. P.</given-names></name> <name><surname>Koskentalo</surname> <given-names>K.</given-names></name> <name><surname>Balk</surname> <given-names>M. H.</given-names></name> <name><surname>Autti</surname> <given-names>T.</given-names></name> <name><surname>Kauram&#x00E4;ki</surname> <given-names>J.</given-names></name> <name><surname>Pomren</surname> <given-names>C.</given-names></name><etal/></person-group> (<year>2008</year>). <article-title>Inter-Subject synchronization of prefrontal cortex hemodynamic activity during natural viewing.</article-title> <source><italic>Open Neuroimag. J.</italic></source> <volume>2</volume> <fpage>14</fpage>&#x2013;<lpage>19</lpage>. <pub-id pub-id-type="doi">10.2174/1874440000802010014</pub-id> <pub-id pub-id-type="pmid">19018313</pub-id></citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kauppi</surname> <given-names>J. P.</given-names></name></person-group> (<year>2010</year>). <article-title>Inter-subject correlation of brain hemodynamic responses during watching a movie: Localization in space and frequency.</article-title> <source><italic>Front. Neuroinform.</italic></source> <volume>4</volume>:<issue>5</issue>. <pub-id pub-id-type="doi">10.3389/fninf.2010.00005</pub-id> <pub-id pub-id-type="pmid">20428497</pub-id></citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kessler</surname> <given-names>R. C.</given-names></name> <name><surname>Petukhova</surname> <given-names>M.</given-names></name> <name><surname>Sampson</surname> <given-names>N. A.</given-names></name> <name><surname>Zaslavsky</surname> <given-names>A. M.</given-names></name> <name><surname>Wittchen</surname> <given-names>H.-U.</given-names></name></person-group> (<year>2012</year>). <article-title>Twelve-month and lifetime prevalence and lifetime morbid risk of anxiety and mood disorders in the United States.</article-title> <source><italic>Int. J. Methods Psychiatr. Res.</italic></source> <volume>21</volume> <fpage>169</fpage>&#x2013;<lpage>184</lpage>. <pub-id pub-id-type="doi">10.1002/mpr.1359</pub-id> <pub-id pub-id-type="pmid">22865617</pub-id></citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ki</surname> <given-names>J. J.</given-names></name> <name><surname>Kelly</surname> <given-names>S. P.</given-names></name> <name><surname>Parra</surname> <given-names>L. C.</given-names></name></person-group> (<year>2016</year>). <article-title>Attention strongly modulates reliability of neural responses to naturalistic narrative stimuli.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>36</volume> <fpage>3092</fpage>&#x2013;<lpage>3101</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2942-15.2016</pub-id> <pub-id pub-id-type="pmid">26961961</pub-id></citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Klimesch</surname> <given-names>W.</given-names></name> <name><surname>Doppelmayr</surname> <given-names>M.</given-names></name> <name><surname>Russegger</surname> <given-names>H.</given-names></name> <name><surname>Pachinger</surname> <given-names>T.</given-names></name> <name><surname>Schwaiger</surname> <given-names>J.</given-names></name></person-group> (<year>1998</year>). <article-title>Induced alpha band power changes in the human EEG and attention.</article-title> <source><italic>Neurosci. Lett.</italic></source> <volume>244</volume> <fpage>73</fpage>&#x2013;<lpage>76</lpage>. <pub-id pub-id-type="doi">10.1016/S0304-3940(98)00122-0</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Knyazev</surname> <given-names>G. G.</given-names></name> <name><surname>Savostyanov</surname> <given-names>A. N.</given-names></name> <name><surname>Levin</surname> <given-names>E. A.</given-names></name></person-group> (<year>2004</year>). <article-title>Alpha oscillations as a correlate of trait anxiety.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>53</volume> <fpage>147</fpage>&#x2013;<lpage>160</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2004.03.001</pub-id> <pub-id pub-id-type="pmid">15210292</pub-id></citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kothe</surname> <given-names>C.</given-names></name> <name><surname>Medine</surname> <given-names>D.</given-names></name> <name><surname>Boulay</surname> <given-names>C.</given-names></name> <name><surname>Grivich</surname> <given-names>M.</given-names></name> <name><surname>Stenner</surname> <given-names>T.</given-names></name></person-group> (<year>2019</year>). <source><italic>Lab streaming layer.</italic></source> <comment>Available online at</comment>: <ext-link ext-link-type="uri" xlink:href="https://labstreaminglayer.readthedocs.io/">https://labstreaminglayer.readthedocs.io/</ext-link> <comment>(accessed January 11, 2022)</comment>.</citation></ref>
<ref id="B37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kroenke</surname> <given-names>K.</given-names></name> <name><surname>Spitzer</surname> <given-names>R. L.</given-names></name> <name><surname>Williams</surname> <given-names>J. B.</given-names></name></person-group> (<year>2001</year>). <article-title>The PHQ-9: Validity of a brief depression severity measure.</article-title> <source><italic>J. Gen. Intern. Med.</italic></source> <volume>16</volume> <fpage>606</fpage>&#x2013;<lpage>613</lpage>. <pub-id pub-id-type="doi">10.1046/j.1525-1497.2001.016009606.x</pub-id> <pub-id pub-id-type="pmid">11556941</pub-id></citation></ref>
<ref id="B38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>W.</given-names></name> <name><surname>Zhao</surname> <given-names>Z.</given-names></name> <name><surname>Chen</surname> <given-names>D.</given-names></name> <name><surname>Peng</surname> <given-names>Y.</given-names></name> <name><surname>Lu</surname> <given-names>Z.</given-names></name></person-group> (<year>2022</year>). <article-title>Prevalence and associated factors of depression and anxiety symptoms among college students: A systematic review and meta-analysis.</article-title> <source><italic>J. Child Psychol. Psychiatry</italic></source> <volume>63</volume> <fpage>1222</fpage>&#x2013;<lpage>1230</lpage>. <pub-id pub-id-type="doi">10.1111/jcpp.13606</pub-id> <pub-id pub-id-type="pmid">35297041</pub-id></citation></ref>
<ref id="B39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Zhu</surname> <given-names>Y.</given-names></name> <name><surname>Vuoriainen</surname> <given-names>E.</given-names></name> <name><surname>Ye</surname> <given-names>C.</given-names></name> <name><surname>Astikainen</surname> <given-names>P.</given-names></name></person-group> (<year>2021</year>). <article-title>Decreased intersubject synchrony in dynamic valence ratings of sad movie contents in dysphoric individuals.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>11</volume>:<issue>14419</issue>. <pub-id pub-id-type="doi">10.1038/s41598-021-93825-1</pub-id> <pub-id pub-id-type="pmid">34257384</pub-id></citation></ref>
<ref id="B40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maffei</surname> <given-names>A.</given-names></name></person-group> (<year>2020</year>). <article-title>Spectrally resolved EEG intersubject correlation reveals distinct cortical oscillatory patterns during free-viewing of affective scenes.</article-title> <source><italic>Psychophysiology</italic></source> <volume>57</volume>:<issue>e13652</issue>. <pub-id pub-id-type="doi">10.1111/psyp.13652</pub-id> <pub-id pub-id-type="pmid">33460185</pub-id></citation></ref>
<ref id="B41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Morrison</surname> <given-names>A. S.</given-names></name> <name><surname>Mateen</surname> <given-names>M. A.</given-names></name> <name><surname>Brozovich</surname> <given-names>F. A.</given-names></name> <name><surname>Zaki</surname> <given-names>J.</given-names></name> <name><surname>Goldin</surname> <given-names>P. R.</given-names></name> <name><surname>Heimberg</surname> <given-names>R. G.</given-names></name><etal/></person-group> (<year>2016</year>). <article-title>Empathy for positive and negative emotions in social anxiety disorder.</article-title> <source><italic>Behav. Res. Ther.</italic></source> <volume>87</volume> <fpage>232</fpage>&#x2013;<lpage>242</lpage>. <pub-id pub-id-type="doi">10.1016/j.brat.2016.10.005</pub-id> <pub-id pub-id-type="pmid">27816799</pub-id></citation></ref>
<ref id="B42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nummenmaa</surname> <given-names>L.</given-names></name> <name><surname>Glerean</surname> <given-names>E.</given-names></name> <name><surname>Viinikainen</surname> <given-names>M.</given-names></name> <name><surname>J&#x00E4;&#x00E4;skel&#x00E4;inen</surname> <given-names>I. P.</given-names></name> <name><surname>Hari</surname> <given-names>R.</given-names></name> <name><surname>Sams</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>Emotions promote social interaction by synchronizing brain activity across individuals.</article-title> <source><italic>Proc. Natl. Acad. Sci.</italic></source> <volume>109</volume> <fpage>9599</fpage>&#x2013;<lpage>9604</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1206095109</pub-id> <pub-id pub-id-type="pmid">22623534</pub-id></citation></ref>
<ref id="B43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ojeda</surname> <given-names>A.</given-names></name> <name><surname>Kreutz-Delgado</surname> <given-names>K.</given-names></name> <name><surname>Mishra</surname> <given-names>J.</given-names></name></person-group> (<year>2021</year>). <article-title>Bridging M/EEG source imaging and independent component analysis frameworks using biologically inspired sparsity priors.</article-title> <source><italic>Neural Comput.</italic></source> <volume>33</volume> <fpage>2408</fpage>&#x2013;<lpage>2438</lpage>. <pub-id pub-id-type="doi">10.1162/neco_a_01415</pub-id></citation></ref>
<ref id="B44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ojeda</surname> <given-names>A.</given-names></name> <name><surname>Kreutz-Delgado</surname> <given-names>K.</given-names></name> <name><surname>Mullen</surname> <given-names>T.</given-names></name></person-group> (<year>2018</year>). <article-title>Fast and robust Block-Sparse Bayesian learning for EEG source imaging.</article-title> <source><italic>Neuroimage</italic></source> <volume>174</volume> <fpage>449</fpage>&#x2013;<lpage>462</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2018.03.048</pub-id> <pub-id pub-id-type="pmid">29596978</pub-id></citation></ref>
<ref id="B45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pascual-Marqui</surname> <given-names>R. D.</given-names></name> <name><surname>Michel</surname> <given-names>C. M.</given-names></name> <name><surname>Lehmann</surname> <given-names>D.</given-names></name></person-group> (<year>1994</year>). <article-title>Low resolution electromagnetic tomography: A new method for localizing electrical activity in the brain.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>18</volume> <fpage>49</fpage>&#x2013;<lpage>65</lpage>. <pub-id pub-id-type="doi">10.1016/0167-8760(94)90014-0</pub-id></citation></ref>
<ref id="B46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Peylo</surname> <given-names>C.</given-names></name> <name><surname>Hilla</surname> <given-names>Y.</given-names></name> <name><surname>Sauseng</surname> <given-names>P.</given-names></name></person-group> (<year>2021</year>). <article-title>Cause or consequence? Alpha oscillations in visuospatial attention.</article-title> <source><italic>Trends Neurosci.</italic></source> <volume>44</volume> <fpage>705</fpage>&#x2013;<lpage>713</lpage>. <pub-id pub-id-type="doi">10.1016/j.tins.2021.05.004</pub-id> <pub-id pub-id-type="pmid">34167840</pub-id></citation></ref>
<ref id="B47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pic&#x00F3;-P&#x00E9;rez</surname> <given-names>M.</given-names></name> <name><surname>Radua</surname> <given-names>J.</given-names></name> <name><surname>Steward</surname> <given-names>T.</given-names></name> <name><surname>Mench&#x00F3;n</surname> <given-names>J. M.</given-names></name> <name><surname>Soriano-Mas</surname> <given-names>C.</given-names></name></person-group> (<year>2017</year>). <article-title>Emotion regulation in mood and anxiety disorders: A meta-analysis of fMRI cognitive reappraisal studies.</article-title> <source><italic>Prog. Neuropsychopharmacol. Biol. Psychiatry</italic></source> <volume>79</volume> <fpage>96</fpage>&#x2013;<lpage>104</lpage>. <pub-id pub-id-type="doi">10.1016/j.pnpbp.2017.06.001</pub-id> <pub-id pub-id-type="pmid">28579400</pub-id></citation></ref>
<ref id="B48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pittelkow</surname> <given-names>M. M.</given-names></name> <name><surname>Aan Het Rot</surname> <given-names>M.</given-names></name> <name><surname>Seidel</surname> <given-names>L. J.</given-names></name> <name><surname>Feyel</surname> <given-names>N.</given-names></name> <name><surname>Roest</surname> <given-names>A. M.</given-names></name></person-group> (<year>2021</year>). <article-title>Social anxiety and empathy: A systematic review and meta-analysis.</article-title> <source><italic>J. Anxiety Disord.</italic></source> <volume>78</volume>:<issue>102357</issue>. <pub-id pub-id-type="doi">10.1016/j.janxdis.2021.102357</pub-id> <pub-id pub-id-type="pmid">33588287</pub-id></citation></ref>
<ref id="B49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Posada-Quintero</surname> <given-names>H. F.</given-names></name> <name><surname>Reljin</surname> <given-names>N.</given-names></name> <name><surname>Bolkhovsky</surname> <given-names>J. B.</given-names></name> <name><surname>Orjuela-Ca&#x00F1;&#x00F3;n</surname> <given-names>A. D.</given-names></name> <name><surname>Chon</surname> <given-names>K. H.</given-names></name></person-group> (<year>2019</year>). <article-title>Brain activity correlates with cognitive performance deterioration during sleep deprivation.</article-title> <source><italic>Front. Neurosci.</italic></source> <volume>13</volume>:<issue>1001</issue>. <pub-id pub-id-type="doi">10.3389/fnins.2019.01001</pub-id> <pub-id pub-id-type="pmid">31607847</pub-id></citation></ref>
<ref id="B50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Poulsen</surname> <given-names>A. T.</given-names></name> <name><surname>Kamronn</surname> <given-names>S.</given-names></name> <name><surname>Dmochowski</surname> <given-names>J.</given-names></name> <name><surname>Parra</surname> <given-names>L. C.</given-names></name> <name><surname>Hansen</surname> <given-names>L. K.</given-names></name></person-group> (<year>2017</year>). <article-title>EEG in the classroom: Synchronised neural recordings during video presentation.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>7</volume>:<issue>43916</issue>. <pub-id pub-id-type="doi">10.1038/srep43916</pub-id> <pub-id pub-id-type="pmid">28266588</pub-id></citation></ref>
<ref id="B51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Price</surname> <given-names>J.</given-names></name> <name><surname>Budzynski</surname> <given-names>T.</given-names></name></person-group> (<year>2009</year>). &#x201C;<article-title>Anxiety, EEG patterns, and neurofeedback</article-title>,&#x201D; in <source><italic>Introduction to quantitative EEG and neurofeedback: Advanced theory and applications</italic></source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Evans</surname> <given-names>J. R.</given-names></name> <name><surname>Budzynski</surname> <given-names>T.</given-names></name> <name><surname>Budzynski</surname> <given-names>H.</given-names></name> <name><surname>Abarbanel</surname> <given-names>A.</given-names></name></person-group> (<publisher-loc>Amsterdam</publisher-loc>: <publisher-name>Elsevier</publisher-name>), <fpage>453</fpage>&#x2013;<lpage>470</lpage>.</citation></ref>
<ref id="B52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ray</surname> <given-names>W. J.</given-names></name> <name><surname>Cole</surname> <given-names>H. W.</given-names></name></person-group> (<year>1985</year>). <article-title>EEG alpha activity reflects attentional demands, and beta activity reflects emotional and cognitive processes.</article-title> <source><italic>Science</italic></source> <volume>228</volume> <fpage>750</fpage>&#x2013;<lpage>752</lpage>. <pub-id pub-id-type="doi">10.1126/science.3992243</pub-id> <pub-id pub-id-type="pmid">3992243</pub-id></citation></ref>
<ref id="B53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Regev</surname> <given-names>M.</given-names></name> <name><surname>Honey</surname> <given-names>C. J.</given-names></name> <name><surname>Simony</surname> <given-names>E.</given-names></name> <name><surname>Hasson</surname> <given-names>U.</given-names></name></person-group> (<year>2013</year>). <article-title>Selective and invariant neural responses to spoken and written narratives.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>33</volume> <fpage>15978</fpage>&#x2013;<lpage>15988</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.1580-13.2013</pub-id> <pub-id pub-id-type="pmid">24089502</pub-id></citation></ref>
<ref id="B54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rousselet</surname> <given-names>G. A.</given-names></name> <name><surname>Pernet</surname> <given-names>C. R.</given-names></name></person-group> (<year>2012</year>). <article-title>Improving standards in brain-behavior correlation analyses.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>6</volume>:<issue>119</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2012.00119</pub-id> <pub-id pub-id-type="pmid">22563313</pub-id></citation></ref>
<ref id="B55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schutter</surname> <given-names>D. J. L. G.</given-names></name> <name><surname>Putman</surname> <given-names>P.</given-names></name> <name><surname>Hermans</surname> <given-names>E.</given-names></name> <name><surname>van Honk</surname> <given-names>J.</given-names></name></person-group> (<year>2001</year>). <article-title>Parietal electroencephalogram beta asymmetry and selective attention to angry facial expressions in healthy human subjects.</article-title> <source><italic>Neurosci. Lett.</italic></source> <volume>314</volume> <fpage>13</fpage>&#x2013;<lpage>16</lpage>.</citation></ref>
<ref id="B56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Spitzer</surname> <given-names>R. L.</given-names></name> <name><surname>Kroenke</surname> <given-names>K.</given-names></name> <name><surname>Williams</surname> <given-names>J. B. W.</given-names></name> <name><surname>L&#x00F6;we</surname> <given-names>B.</given-names></name></person-group> (<year>2006</year>). <article-title>A brief measure for assessing generalized anxiety disorder: The GAD-7.</article-title> <source><italic>Arch. Intern. Med.</italic></source> <volume>166</volume> <fpage>1092</fpage>&#x2013;<lpage>1097</lpage>. <pub-id pub-id-type="doi">10.1001/archinte.166.10.1092</pub-id> <pub-id pub-id-type="pmid">16717171</pub-id></citation></ref>
<ref id="B57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stopczynski</surname> <given-names>A.</given-names></name> <name><surname>Stahlhut</surname> <given-names>C.</given-names></name> <name><surname>Larsen</surname> <given-names>J. E.</given-names></name> <name><surname>Petersen</surname> <given-names>M. K.</given-names></name> <name><surname>Hansen</surname> <given-names>L. K.</given-names></name></person-group> (<year>2014</year>). <article-title>The smartphone brain scanner: A portable real-time neuroimaging system.</article-title> <source><italic>PLoS One</italic></source> <volume>9</volume>:<issue>e86733</issue>. <pub-id pub-id-type="doi">10.1371/journal.pone.0086733</pub-id> <pub-id pub-id-type="pmid">24505263</pub-id></citation></ref>
<ref id="B58"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Szucs</surname> <given-names>D.</given-names></name> <name><surname>Ioannidis</surname> <given-names>J. P. A.</given-names></name></person-group> (<year>2020</year>). <article-title>Sample size evolution in neuroimaging research: An evaluation of highly-cited studies (1990&#x2013;2012) and of latest practices (2017&#x2013;2018) in high-impact journals.</article-title> <source><italic>Neuroimage</italic></source> <volume>221</volume>:<issue>117164</issue>.</citation></ref>
<ref id="B59"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Uhrig</surname> <given-names>M. K.</given-names></name> <name><surname>Trautmann</surname> <given-names>N.</given-names></name> <name><surname>Baumg&#x00E4;rtner</surname> <given-names>U.</given-names></name> <name><surname>Treede</surname> <given-names>R.-D.</given-names></name> <name><surname>Henrich</surname> <given-names>F.</given-names></name> <name><surname>Hiller</surname> <given-names>W.</given-names></name><etal/></person-group> (<year>2016</year>). <article-title>Emotion elicitation: A comparison of pictures and films.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>7</volume>:<issue>180</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2016.00180</pub-id> <pub-id pub-id-type="pmid">26925007</pub-id></citation></ref>
<ref id="B60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vandewalle</surname> <given-names>G.</given-names></name> <name><surname>Schwartz</surname> <given-names>S.</given-names></name> <name><surname>Grandjean</surname> <given-names>D.</given-names></name> <name><surname>Wuillaume</surname> <given-names>C.</given-names></name> <name><surname>Balteau</surname> <given-names>E.</given-names></name> <name><surname>Degueldre</surname> <given-names>C.</given-names></name><etal/></person-group> (<year>2010</year>). <article-title>Spectral quality of light modulates emotional brain responses in humans.</article-title> <source><italic>Proc. Natl. Acad. Sci.</italic></source> <volume>107</volume> <fpage>19549</fpage>&#x2013;<lpage>19554</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1010180107</pub-id> <pub-id pub-id-type="pmid">20974959</pub-id></citation></ref>
<ref id="B61"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>M.</given-names></name> <name><surname>He</surname> <given-names>B. J.</given-names></name></person-group> (<year>2014</year>). <article-title>A cross-modal investigation of the neural substrates for ongoing cognition.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>5</volume>:<issue>945</issue>.</citation></ref>
<ref id="B62"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Widmann</surname> <given-names>A.</given-names></name> <name><surname>Schr&#x00F6;ger</surname> <given-names>E.</given-names></name> <name><surname>Maess</surname> <given-names>B.</given-names></name></person-group> (<year>2015</year>). <article-title>Digital filter design for electrophysiological data &#x2013; a practical approach.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>250</volume> <fpage>34</fpage>&#x2013;<lpage>46</lpage>.</citation></ref>
<ref id="B63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wilson</surname> <given-names>S. M.</given-names></name> <name><surname>Molnar-Szakacs</surname> <given-names>I.</given-names></name> <name><surname>Iacoboni</surname> <given-names>M.</given-names></name></person-group> (<year>2008</year>). <article-title>Beyond superior temporal cortex: Intersubject correlations in narrative speech comprehension.</article-title> <source><italic>Cereb. Cortex</italic></source> <volume>18</volume> <fpage>230</fpage>&#x2013;<lpage>242</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhm049</pub-id> <pub-id pub-id-type="pmid">17504783</pub-id></citation></ref>
</ref-list>
</back>
</article>