<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neuroergon.</journal-id>
<journal-title>Frontiers in Neuroergonomics</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neuroergon.</abbrev-journal-title>
<issn pub-type="epub">2673-6195</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnrgo.2021.687108</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroergonomics</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Hybrid Harmony: A Multi-Person Neurofeedback Application for Interpersonal Synchrony</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Chen</surname> <given-names>Phoebe</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1074185/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Hendrikse</surname> <given-names>Sophie</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1308549/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Sargent</surname> <given-names>Kaia</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Romani</surname> <given-names>Michele</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1363919/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Oostrik</surname> <given-names>Matthias</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Wilderjans</surname> <given-names>Tom F.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1286097/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Koole</surname> <given-names>Sander</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/58096/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Dumas</surname> <given-names>Guillaume</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Medine</surname> <given-names>David</given-names></name>
<xref ref-type="aff" rid="aff8"><sup>8</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/475995/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Dikker</surname> <given-names>Suzanne</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff9"><sup>9</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/38999/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Psychology Department, New York University</institution>, <addr-line>New York, NY</addr-line>, <country>United States</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of Clinical Psychology, Vrije Universiteit Amsterdam</institution>, <addr-line>Amsterdam</addr-line>, <country>Netherlands</country></aff>
<aff id="aff3"><sup>3</sup><institution>Methodology and Statistics Research Unit, Institute of Psychology, Leiden University</institution>, <addr-line>Leiden</addr-line>, <country>Netherlands</country></aff>
<aff id="aff4"><sup>4</sup><institution>Electrical Engineering, Mathematics and Computer Science Department, University of Twente</institution>, <addr-line>Enschede</addr-line>, <country>Netherlands</country></aff>
<aff id="aff5"><sup>5</sup><institution>Independent Researcher</institution>, <addr-line>Amsterdam</addr-line>, <country>Netherlands</country></aff>
<aff id="aff6"><sup>6</sup><institution>Department of Psychiatry, Centre Hospitalier Universitaire Sainte-Justine Research Center, University of Montreal</institution>, <addr-line>Montreal, QC</addr-line>, <country>Canada</country></aff>
<aff id="aff7"><sup>7</sup><institution>Mila &#x02013; Quebec Artificial Intelligence Institute, University of Montreal</institution>, <addr-line>Montreal, QC</addr-line>, <country>Canada</country></aff>
<aff id="aff8"><sup>8</sup><institution>Diademics Pty Ltd.</institution>, <addr-line>Mount Waverley, VIC</addr-line>, <country>Australia</country></aff>
<aff id="aff9"><sup>9</sup><institution>New York University-Max Planck Center for Language, Music, and Emotion, New York University</institution>, <addr-line>New York, NY</addr-line>, <country>United States</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Anton Nijholt, University of Twente, Netherlands</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Shigeyuki Ikeda, RIKEN Center for Advanced Intelligence Project (AIP), Japan; Aleksandra Vuckovic, University of Glasgow, United Kingdom</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Suzanne Dikker <email>suzanne.dikker&#x00040;nyu.edu</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Neurotechnology and Systems Neuroergonomics, a section of the journal Frontiers in Neuroergonomics</p></fn></author-notes>
<pub-date pub-type="epub">
<day>12</day>
<month>08</month>
<year>2021</year>
</pub-date>
<pub-date pub-type="collection">
<year>2021</year>
</pub-date>
<volume>2</volume>
<elocation-id>687108</elocation-id>
<history>
<date date-type="received">
<day>29</day>
<month>03</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>16</day>
<month>06</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2021 Chen, Hendrikse, Sargent, Romani, Oostrik, Wilderjans, Koole, Dumas, Medine and Dikker.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Chen, Hendrikse, Sargent, Romani, Oostrik, Wilderjans, Koole, Dumas, Medine and Dikker</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract><p>Recent years have seen a dramatic increase in studies measuring brain activity, physiological responses, and/or movement data from multiple individuals during social interaction. For example, so-called &#x0201C;hyperscanning&#x0201D; research has demonstrated that brain activity may become synchronized across people as a function of a range of factors. Such findings not only underscore the potential of hyperscanning techniques to capture meaningful aspects of naturalistic interactions, but also raise the possibility that hyperscanning can be leveraged as a tool to help improve such naturalistic interactions. Building on our previous work showing that exposing dyads to real-time inter-brain synchrony neurofeedback may help boost their interpersonal connectedness, we describe the biofeedback application Hybrid Harmony, a Brain-Computer Interface (BCI) that supports the simultaneous recording of multiple neurophysiological datastreams and the real-time visualization and sonification of inter-subject synchrony. We report results from 236 dyads experiencing synchrony neurofeedback during naturalistic face-to-face interactions, and show that pairs&#x00027; social closeness and affective personality traits can be reliably captured with the inter-brain synchrony neurofeedback protocol, which incorporates several different online inter-subject connectivity analyses that can be applied interchangeably. Hybrid Harmony can be used by researchers who wish to study the effects of synchrony biofeedback, and by biofeedback artists and serious game developers who wish to incorporate multiplayer situations into their practice.</p></abstract>
<kwd-group>
<kwd>hyperscanning</kwd>
<kwd>neurofeedback</kwd>
<kwd>brain-computer-interface</kwd>
<kwd>EEG</kwd>
<kwd>inter-brain coupling</kwd>
<kwd>real-world neuroscience</kwd>
</kwd-group>
<counts>
<fig-count count="7"/>
<table-count count="1"/>
<equation-count count="1"/>
<ref-count count="76"/>
<page-count count="13"/>
<word-count count="8200"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>Introduction</title>
<p>What does it mean to lose yourself in someone else? How is it possible that the mere physical presence of another human can make us believe we can conquer the world, or conversely, make us feel lonely and incapable? We know, both scientifically and intuitively, that relationships are crucial for our physical and mental well-being (Pietromonaco and Collins, <xref ref-type="bibr" rid="B58">2017</xref>). But they are also sources of frustration in their fluid, messy mix of internal inconsistencies: love and hate, inclusion and exclusion, fascination and comfort, challenge and familiarity. Can we capture this seemingly subjective, fleeting, and elusive notion of &#x0201C;being on the same wavelength&#x0201D; with another person, with objective measurement tools? And if so, can we leverage this information to guide people in their interaction with others?</p>
<p>Successful social interactions require tight spatiotemporal coordination between participants at motor, perceptual, and cognitive levels. Around a decade ago, several labs began to use a variety of methods to record (neuro)physiological data from multiple people simultaneously, a technique known as &#x0201C;hyperscanning.&#x0201D; This has allowed researchers to study dynamic coordination in a range of social situations such as ensembles performing music, multiple people performing actions together, or carrying on a conversation (Babiloni et al., <xref ref-type="bibr" rid="B5">2006</xref>; Tognoli et al., <xref ref-type="bibr" rid="B67">2007</xref>; Dumas et al., <xref ref-type="bibr" rid="B19">2010</xref>; Yun, <xref ref-type="bibr" rid="B74">2013</xref>; Zamm et al., <xref ref-type="bibr" rid="B76">2018b</xref>; see e.g., Hari et al., <xref ref-type="bibr" rid="B27">2013</xref>; Babiloni and Astolfi, <xref ref-type="bibr" rid="B4">2014</xref>; Czeszumski et al., <xref ref-type="bibr" rid="B11">2020</xref> for reviews). There now exists a growing body of work where pairs or groups of participants engage in social interactions while their brain activity, physiological responses, and (eye) movements are monitored.</p>
<p>While hyperscanning research is also conducted using hemodynamic neuroimaging tools, including functional Magnetic Resonance Imaging (fMRI; Koike et al., <xref ref-type="bibr" rid="B33">2016</xref>, <xref ref-type="bibr" rid="B32">2019</xref>; Abe et al., <xref ref-type="bibr" rid="B1">2019</xref>) but especially functional near-infrared spectroscopy (fNIRS; Scholkmann et al., <xref ref-type="bibr" rid="B62">2013</xref>; Nozawa et al., <xref ref-type="bibr" rid="B50">2016</xref>; Reindl et al., <xref ref-type="bibr" rid="B59">2018</xref>), we here focus on electroencephalography (EEG) hyperscanning. The extent to which EEG activity becomes synchronized between people is correlated with a range of factors. For example, it has been widely demonstrated, in both single-brain laboratory research and hyperscanning studies, that shared attention to the same stimuli leads to similar brain responses across individuals, and consequently, higher inter-brain synchrony (<xref ref-type="fig" rid="F1">Figure 1A</xref>; Hasson, <xref ref-type="bibr" rid="B28">2004</xref>; Dikker et al., <xref ref-type="bibr" rid="B16">2017</xref>; Czeszumski et al., <xref ref-type="bibr" rid="B11">2020</xref>). Importantly, social behavior has also been shown to serve as an (exogenous) source of interpersonal synchrony (<xref ref-type="fig" rid="F1">Figures 1B,E</xref>): Behaviors such as joint action, language, eye contact, touch, and cooperation drive synchrony in various social contexts (Dumas et al., <xref ref-type="bibr" rid="B19">2010</xref>; Dikker et al., <xref ref-type="bibr" rid="B15">2014</xref>, <xref ref-type="bibr" rid="B13">2021</xref>; Kinreich et al., <xref ref-type="bibr" rid="B30">2017</xref>; Goldstein et al., <xref ref-type="bibr" rid="B23">2018</xref>; P&#x000E9;rez et al., <xref ref-type="bibr" rid="B55">2019</xref>; Reinero et al., <xref ref-type="bibr" rid="B60">2021</xref>). 
Furthermore, both individuals&#x00027; social closeness and personality traits (e.g., empathy) affect people&#x00027;s social engagement during an interaction, and thus the extent to which their brain responses become synchronized (Dikker et al., <xref ref-type="bibr" rid="B16">2017</xref>, <xref ref-type="bibr" rid="B13">2021</xref>; Kinreich et al., <xref ref-type="bibr" rid="B30">2017</xref>; Goldstein et al., <xref ref-type="bibr" rid="B23">2018</xref>; Bevilacqua et al., <xref ref-type="bibr" rid="B6">2019</xref>). Participants&#x00027; mental states (e.g., focus) similarly influence participants&#x00027; engagement with each other, endogenously motivating them to make an effort to connect to each other (<xref ref-type="fig" rid="F1">Figure 1C</xref>; Dikker et al., <xref ref-type="bibr" rid="B16">2017</xref>, <xref ref-type="bibr" rid="B13">2021</xref>).</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>A summary of possible sources of inter-brain synchrony during social interaction (adapted from Dikker et al., <xref ref-type="bibr" rid="B13">2021</xref>). <bold>(A)</bold> External non-social stimuli (top) and <bold>(E)</bold> social behavior (bottom) provide exogenous sources of shared stimulus entrainment and interpersonal social coordination, respectively, leading to similar brain responses, i.e., inter-brain synchrony. <bold>(B)</bold> Both individuals&#x00027; social closeness and personality traits (e.g., affective empathy) affect their social engagement during the interaction, and thus the extent to which their brain responses become synchronized. <bold>(C)</bold> Participants&#x00027; mental states (e.g., focus) similarly affect participants&#x00027; engagement with each other, intrinsically (endogenously) motivating participants to make an effort to connect to each other. <bold>(D)</bold> Such engagement can be &#x0201C;boosted&#x0201D; via extrinsic motivation, which could subsequently lead to increased inter-brain synchrony.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0001.tif"/>
</fig>
<p>Importantly, people can also be extrinsically motivated to socially engage with each other (<xref ref-type="fig" rid="F1">Figure 1D</xref>). Specifically, our group recently reported that exposing people to a hyperscanning neurofeedback environment can motivate social engagement (Dikker et al., <xref ref-type="bibr" rid="B13">2021</xref>). Using data from the interactive social neurofeedback installation The Mutual Wave Machine (<ext-link ext-link-type="uri" xlink:href="http://wp.nyu.edu/mutualwavemachine">wp.nyu.edu/mutualwavemachine</ext-link>), we show that dyads who were explicitly made aware of the social relevance of the neurofeedback environment, exhibited an increase in inter-brain coupling over time. This suggests that external factors may help boost interpersonal engagement, which raises the possibility that interpersonal synchrony biofeedback may be one fruitful avenue to pursue in such efforts.</p>
<p>However, while neurofeedback applications using data from individual brains are fairly widely used across scientific, clinical, educational, and artistic contexts (see e.g., van Hoogdalem et al., <xref ref-type="bibr" rid="B69">2020</xref> for a review), to our knowledge multi-person neurofeedback using hyperscanning EEG has been implemented primarily in game and art environments (see contributions in Kovacevic et al., <xref ref-type="bibr" rid="B34">2015</xref>; Dikker et al., <xref ref-type="bibr" rid="B14">2019</xref>, <xref ref-type="bibr" rid="B13">2021</xref>; Nijholt, <xref ref-type="bibr" rid="B46">2019</xref>; see Duan et al., <xref ref-type="bibr" rid="B17">2013</xref>; Salminen et al., <xref ref-type="bibr" rid="B61">2019</xref> for examples of scientifically oriented dual-brain neurofeedback experiments). As a result, little is known about the possible effectiveness of hyperscanning neurofeedback in improving social communication.</p>
<p>This is further complicated by the fact that consensus is lacking with regard to how synchrony should be computed (Ayrolles et al., <xref ref-type="bibr" rid="B3">2021</xref>). While some metrics have been shown to be &#x0201C;better&#x0201D; than others from a purely statistical perspective (Burgess, <xref ref-type="bibr" rid="B8">2013</xref>), only very few scholars have attempted to map computational choices with regard to interpersonal neural connectivity to psychological processes or constructs (Dumas and Fairhurst, <xref ref-type="bibr" rid="B18">2021</xref>; Hoehl et al., <xref ref-type="bibr" rid="B29">2021</xref>). This distinguishes synchrony neurofeedback from other BCI applications, such as so-called &#x0201C;P3 spellers&#x0201D; (Fazel-Rezai et al., <xref ref-type="bibr" rid="B21">2012</xref>), which are based on well-established neural signatures.</p>
<p>Because of the lack of consensus with regard to optimal synchrony metrics, we argue that it is desirable that multi-brain neurofeedback applications allow users to select from various synchrony metrics that can be used independently, and thus explore the utility of different metrics in different contexts.</p>
<p>To this end, we have developed Hybrid Harmony, a Brain-Computer Interface (BCI) that uses a hyperscanning approach to allow the collection of EEG data from two or more people simultaneously and enables users to visualize/sonify the extent to which participants&#x00027; biometrics are coupled, choosing between different synchrony metrics. These metrics, described in section Connectivity Analysis, are developed in parallel with HyPyP, an open-source Python-based pipeline that allows researchers to compute and compare different inter-brain connectivity metrics on the same dataset (Ayrolles et al., <xref ref-type="bibr" rid="B3">2021</xref>).</p>
</sec>
<sec id="s2">
<title>Software Description</title>
<sec>
<title>Overview</title>
<p>Hybrid Harmony is an open-source software package written in Python (<ext-link ext-link-type="uri" xlink:href="https://github.com/RhythmsOfRelating/HybridHarmony">https://github.com/RhythmsOfRelating/HybridHarmony</ext-link>), accompanied by a visualization module and a sonification module (<xref ref-type="fig" rid="F2">Figure 2</xref>). The software consists of a backend that handles data acquisition and performs analyses, and a Graphical User Interface (GUI) made with PyQt5 (<ext-link ext-link-type="uri" xlink:href="https://www.riverbankcomputing.com/software/pyqt/">https://www.riverbankcomputing.com/software/pyqt/</ext-link>), where users can control parameters for the analyses (<xref ref-type="fig" rid="F2">Figure 2</xref>). We introduce the software and discuss compatible hardware (EEG systems) in section Hardware, the processing pipeline including preprocessing and connectivity analysis in section Data Preprocessing, Connectivity Analysis, Normalization, and the visualization and sonification modules in section Visualization and Sonification. Data transfer protocols, i.e., LabStreamingLayer (LSL, <ext-link ext-link-type="uri" xlink:href="https://github.com/sccn/labstreaminglayer">https://github.com/sccn/labstreaminglayer</ext-link>) and Open Sound Control (OSC, Wright and Momeni, <xref ref-type="bibr" rid="B73">2011</xref>), are described in <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Data Transfer Protocol. Detailed instructions can be found on the GitHub page.</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>Hybrid Harmony software. The backend and Graphical User Interface (GUI) of Hybrid Harmony are shown in the gray box. Hybrid Harmony performs data preprocessing and connectivity analysis on the incoming EEG data from LabStreamingLayer (LSL), and outputs synchrony values to LSL and Open Sound Control (OSC; detailed in <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, The Data Transfer Protocol: Open Sound Control). The output can then be recorded by LabRecorder (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Saving Data Through LSL) and be transformed into sensory experiences through the visualization and sonification modules (section Visualization and Sonification). The GUI enables the user to control parameters for preprocessing and connectivity analysis, as well as monitor the program status on the console.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0002.tif"/>
</fig>
</sec>
<sec>
<title>Hardware</title>
<p>The tool is compatible with any EEG device that interfaces with LabStreamingLayer, and has been tested with MUSE (<ext-link ext-link-type="uri" xlink:href="https://choosemuse.com/">https://choosemuse.com/</ext-link>; Bhayee et al., <xref ref-type="bibr" rid="B7">2016</xref>), emotiv (EPOC and EPOC&#x0002B;; <ext-link ext-link-type="uri" xlink:href="https://www.emotiv.com/">https://www.emotiv.com/</ext-link>; Williams et al., <xref ref-type="bibr" rid="B72">2020</xref>), the SMARTING system from mBrainTrain (<ext-link ext-link-type="uri" xlink:href="https://mbraintrain.com/">https://mbraintrain.com/</ext-link>; Grennan et al., <xref ref-type="bibr" rid="B24">2021</xref>), and Brain Vision LiveAmp systems (<ext-link ext-link-type="uri" xlink:href="https://www.brainproducts.com/">https://www.brainproducts.com/</ext-link>; Fang et al., <xref ref-type="bibr" rid="B20">2019</xref>), and can be expanded to accommodate other systems that export data to LabStreamingLayer.</p>
</sec>
<sec>
<title>Data Preprocessing</title>
<p>The first stage of data preprocessing is a buffering procedure that holds and segments incoming data time-series. Incoming streams from LSL are stored in a 30 s buffer updating at the EEG data&#x00027;s sampling rate (e.g., 250 Hz for Brain Vision LiveAmp system). Hybrid Harmony then selects the most recent time window to perform the signal processing procedure. The time window is determined by &#x0201C;window size&#x0201D; and is 3 s by default. The rate of the analysis depends on the computation bandwidth of the system Hybrid Harmony is running on. For example, on many systems we tested data are analyzed roughly 3.5 times per second.</p>
<p>The time window is filtered with the infinite impulse response (IIR) filter (Oppenheim, <xref ref-type="bibr" rid="B51">1999</xref>) into frequency bands of interest (e.g., 8&#x02013;12 Hz for the alpha frequency band), and then Hilbert transformed to generate the instantaneous analytic signal (Oppenheim, <xref ref-type="bibr" rid="B51">1999</xref>). Users can choose to output spectral power concurrently by selecting the &#x0201C;sending power values&#x0201D; checkbox: power spectral density will be computed and sent to LSL along with connectivity values.</p>
<p>The GUI allows users to change processing parameters via &#x0201C;Frequency bands for analysis,&#x0201D; &#x0201C;Input data streams,&#x0201D; and &#x0201C;window size&#x0201D; (<xref ref-type="fig" rid="F3">Figure 3</xref>). &#x0201C;Frequency bands for analysis&#x0201D; is an editable table specifying the frequency bands of interest, where &#x0201C;Freq. Band&#x0201D; denotes the frequency band name, &#x0201C;Min. Freq&#x0201D; and &#x0201C;Max. Freq&#x0201D; denote the lower and upper bounds of the frequency band, and &#x0201C;weight&#x0201D; determines how the different bands are weighted relative to one another. <xref ref-type="fig" rid="F3">Figures 3</xref>, <xref ref-type="fig" rid="F4">4</xref> show the default setup for this table. &#x0201C;Input data streams&#x0201D; displays the name, channel count, and sampling rate of the incoming streams, and its editable cells (e.g., &#x0201C;theta channels,&#x0201D; &#x0201C;alpha channels,&#x0201D; etc.) determine the specific channels to use for each frequency band. Lastly, the &#x0201C;window size&#x0201D; text field determines the length of the data segment for the analysis.</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p>The Hybrid Harmony GUI. The interface is divided into five sections: &#x0201C;Frequency bands for analysis,&#x0201D; &#x0201C;Input data streams,&#x0201D; &#x0201C;Parameters,&#x0201D; &#x0201C;Normalization parameters&#x0201D; and &#x0201C;Console,&#x0201D; the first four of which allow users to specify the parameters detailed in section Data Preprocessing, Connectivity Analysis, Normalization. The GUI facilitates three main actions shown as buttons &#x0201C;load LSL streams,&#x0201D; &#x0201C;start analysis,&#x0201D; and &#x0201C;stop analysis.&#x0201D; &#x0201C;Load LSL streams&#x0201D; will start the stream discovery (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Stream Discovery Through LSL) in the backend; &#x0201C;start analysis&#x0201D; button initiates the analyses (section Connectivity Analysis and Normalization) and data transferring (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Using LSL for Output). &#x0201C;Stop analysis&#x0201D; will pause the analyses and allow the parameters to be edited.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0003.tif"/>
</fig>
<fig id="F4" position="float">
<label>Figure 4</label>
<caption><p>Frequency bands for analysis and Input data streams of the Hybrid Harmony GUI. The left half of the GUI, i.e., the &#x0201C;Frequency bands for analysis&#x0201D; and &#x0201C;Input data streams&#x0201D; tables control parameters for data preprocessing (section Data Preprocessing). <bold>(A)</bold> &#x0201C;Frequency bands for analysis&#x0201D; has four editable columns: Freq. Band: Name of the frequency band; Min. Freq.: The lower bound frequency for the band; Max. Freq.: The upper bound frequency for the band; weight: Weighting factor of the current band (connectivity values will be multiplied by this factor). <bold>(B)</bold> &#x0201C;input data streams&#x0201D; has three non-editable columns: Stream ID: Name of the EEG stream; channel count: number of EEG channels; sampling rate: sampling rate of the EEG streams. It also has editable columns, corresponding to frequency band names that users typed in <bold>(A)</bold>. The columns theta channels, alpha channels, and beta channels thus determine channel indices to use for computing connectivity values.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0004.tif"/>
</fig>
</sec>
<sec>
<title>Connectivity Analysis</title>
<p>Connectivity Analysis takes the analytic signal from Data Preprocessing as input, and computes one connectivity value for each electrode pair per frequency band in every participant pair. Then, connectivity values are averaged across electrode pairs, so the output is one connectivity value per frequency band for every participant pair. The exact computations are adapted from the python-based inter-brain analysis pipeline HyPyP (Ayrolles et al., <xref ref-type="bibr" rid="B3">2021</xref>), and listed in <xref ref-type="table" rid="T1">Table 1</xref>. The user may choose from a list of connectivity measures by changing the &#x0201C;Connectivity metric&#x0201D; parameter (<xref ref-type="fig" rid="F5">Figure 5</xref>). Currently implemented metrics include coherence, imaginary coherence, envelope correlation, power correlation, phase-locking value (PLV), and Circular Correlation Coefficient (CCorr). The mathematical equations and references of these metrics are provided in <xref ref-type="table" rid="T1">Table 1</xref>. In the equations, <italic>X</italic>(<italic>i, t</italic>) denotes the analytic signal for subject <italic>x</italic> at channel <italic>i</italic> for time point <italic>t</italic>, and <italic>Y</italic>(<italic>j, t</italic>) is that for subject <italic>y</italic> at channel <italic>j</italic> for time point <italic>t</italic>, and the star sign denotes the complex conjugate. The result of each equation is one synchrony value per electrode pair, written as <italic>r</italic><sub><italic>i, j</italic></sub>, and the computation is carried out for all electrode pairs (<italic>i, j</italic>) where <italic>i</italic> belongs to subject <italic>x</italic> &#x00027;s channels and <italic>j</italic> belongs to subject <italic>y</italic> &#x00027;s channels. Note that we are computing all metrics using the analytic signal from the previous step to streamline the computation. 
We compute synchrony values from the analytic signals using Hilbert Transform (HT), an alternative analysis to the windowed Fast Fourier Transform (FFT). The analytic signal from HT and the spectra from the windowed FFT both represent the amplitude and phase of the signal in their real and the imaginary parts, respectively, except that the analytic signal is &#x0201C;instantaneous,&#x0201D; while the windowed FFT is an average value over a period (Kovach, <xref ref-type="bibr" rid="B35">2017</xref>). Therefore, while the cross-spectra in coherence are usually the expected value of the product between the two signals&#x00027; spectra, we used a computation (<xref ref-type="table" rid="T1">Table 1</xref>) adapted from Equation (1) (Kovach, <xref ref-type="bibr" rid="B35">2017</xref>), i.e., the cross-spectra are expressed as the expectation of the dot product between <italic>X</italic>(<italic>i, t</italic>) and the complex conjugate of <italic>Y</italic>(<italic>j, t</italic>). Using this formulation is appropriate when investigating the synchronization of signals as it allows us to measure their similarity on a sample-by-sample basis, not just as an average over a relatively long time-window.</p>
<disp-formula id="E1"><label>(1)</label><mml:math id="M1"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mover accent="false"><mml:mrow><mml:msub><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>^</mml:mo></mml:mover><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:mfrac><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo>&#x0222B;</mml:mo></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:munder></mml:mstyle><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mo>&#x0002A;</mml:mo></mml:mrow></mml:msup><mml:mi>d</mml:mi><mml:mi>t</mml:mi></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>Currently, only one metric at a time can be employed, but a user can run multiple instances of the software and thus output multiple metrics simultaneously. The user can then record these streams using LabRecorder (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Saving Data Through LSL). For visualization, it is possible to differentiate the streams based on the unique source_id in the metadata of the LSL stream, and choose only one to display. However, this feature is not yet developed in our visualization module.</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Connectivity metrics adapted from Ayrolles et al. (<xref ref-type="bibr" rid="B3">2021</xref>).</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Connectivity metrics</bold></th>
<th valign="top" align="left"><bold>Equation</bold></th>
<th valign="top" align="left"><bold>References</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Coherence</td>
<td valign="top" align="left"><inline-formula><mml:math id="M2"><mml:msub><mml:mrow><mml:mi>r</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mo>|</mml:mo><mml:msup><mml:mrow><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x000B7;</mml:mo><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mo>&#x0002A;</mml:mo></mml:mrow></mml:msup><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:munderover><mml:mo>|</mml:mo><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo>&#x000B7;</mml:mo><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>T</mml:mi></mml:mrow></mml:munderover><mml:mo>|</mml:mo><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac></mml:math></inline-formula></td>
<td valign="top" align="left">Guevara and Corsi-Cabrera, <xref ref-type="bibr" rid="B25">1996</xref>; <bold>Dikker et al.</bold>, <xref ref-type="bibr" rid="B16"><bold>2017</bold></xref></td>
</tr>
<tr>
<td valign="top" align="left">Imaginary coherence</td>
<td valign="top" align="left"><inline-formula><mml:math id="M3"><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:msub><mml:mi>r</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:mtext>imag</mml:mtext><mml:mo stretchy='false'>(</mml:mo><mml:mi>X</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>&#x000B7;</mml:mo><mml:mi>Y</mml:mi><mml:msup><mml:mrow><mml:mo stretchy='false'>(</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mo>&#x02217;</mml:mo></mml:msup><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:msqrt><mml:mrow><mml:mstyle displaystyle='false'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mn>1</mml:mn><mml:mi>N</mml:mi></mml:msubsup><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:mi>X</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mstyle><mml:mo>&#x000B7;</mml:mo><mml:mstyle displaystyle='false'><mml:msubsup><mml:mo>&#x02211;</mml:mo><mml:mn>1</mml:mn><mml:mi>N</mml:mi></mml:msubsup><mml:mrow><mml:msup><mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mrow><mml:mi>Y</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mrow><mml:mn>2</mml:mn></mml:msup></mml:mrow></mml:mstyle></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac></mml:mrow></mml:math></inline-formula></td>
<td valign="top" align="left">Nolte et al., <xref ref-type="bibr" rid="B47">2004</xref>; <bold>Dikker et al.</bold>, <xref ref-type="bibr" rid="B13"><bold>2021</bold></xref></td>
</tr>
<tr>
<td valign="top" align="left">Envelope correlation</td>
<td valign="top" align="left"><inline-formula><mml:math id="M4"><mml:msub><mml:mrow><mml:mi>r</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x000B7;</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" 
class="mml-overline"><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac></mml:math></inline-formula></td>
<td valign="top" align="left">Mehrkanoon et al., <xref ref-type="bibr" rid="B40">2014</xref>; Clerico et al., <xref ref-type="bibr" rid="B10">2015</xref>; <bold>Zamm et al.</bold>, <xref ref-type="bibr" rid="B75"><bold>2018a</bold></xref></td>
</tr>
<tr>
<td/>
<td valign="top" align="left"><italic>where env</italic><sub><italic>x</italic></sub> &#x0003D; |<italic>X</italic>(<italic>i, t</italic>)|, <italic>env</italic><sub><italic>y</italic></sub> &#x0003D; |<italic>Y</italic>(<italic>j, t</italic>)|</td>
<td/>
</tr>
<tr>
<td valign="top" align="left">Power correlation</td>
<td valign="top" align="left"><inline-formula><mml:math id="M5"><mml:msub><mml:mrow><mml:mi>r</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x000B7;</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" 
class="mml-overline"><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac><mml:mtext>&#x000A0;</mml:mtext></mml:math></inline-formula></td>
<td valign="top" align="left">Shaw, <xref ref-type="bibr" rid="B63">1984</xref>; <bold>Guevara and Corsi-Cabrera</bold>, <xref ref-type="bibr" rid="B25"><bold>1996</bold></xref></td>
</tr>
<tr>
<td/>
<td valign="top" align="left"><inline-formula><mml:math id="M6"><mml:mi>w</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:msub><mml:mrow><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mo>|</mml:mo><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo>,</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:msub><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:mi>p</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mo>|</mml:mo><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mo>|</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mtext>&#x000A0;</mml:mtext></mml:math></inline-formula></td>
<td/>
</tr>
<tr>
<td valign="top" align="left">PLV</td>
<td valign="top" align="left"><inline-formula><mml:math id="M7"><mml:msub><mml:mrow><mml:mi>r</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:mfrac><mml:mo>|</mml:mo><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msub><mml:mrow><mml:mi>&#x003C6;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>&#x000B7;</mml:mo><mml:msup><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003C6;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mrow><mml:mo>&#x0002A;</mml:mo></mml:mrow></mml:msup><mml:mo>|</mml:mo></mml:math></inline-formula></td>
<td valign="top" align="left">Lachaux et al., <xref ref-type="bibr" rid="B36">1999</xref>; <bold>Dumas et al.</bold>, <xref ref-type="bibr" rid="B19"><bold>2010</bold></xref></td>
</tr>
<tr>
<td/>
<td valign="top" align="left"><inline-formula><mml:math id="M8"><mml:mi>w</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:msub><mml:mrow><mml:mi>&#x003C6;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mi>X</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mfrac><mml:mo>,</mml:mo><mml:mtext>&#x000A0;</mml:mtext><mml:msub><mml:mrow><mml:mtext>&#x000A0;</mml:mtext><mml:mi>&#x003C6;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mi>Y</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>,</mml:mo><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>|</mml:mo></mml:mrow></mml:mfrac></mml:math></inline-formula></td>
<td/>
</tr>
<tr>
<td valign="top" align="left">CCorr</td>
<td valign="top" align="left"><inline-formula><mml:math id="M9"><mml:msub><mml:mrow><mml:mi>r</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x000B7;</mml:mo><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover 
accent="false" class="mml-overline"><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>x</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt><mml:msqrt><mml:mrow><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover><mml:msup><mml:mrow><mml:mi>s</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>-</mml:mo><mml:mover accent="false" class="mml-overline"><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mrow><mml:mi>y</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo accent="true">&#x000AF;</mml:mo></mml:mover></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow></mml:msqrt></mml:mrow></mml:mfrac><mml:mtext>&#x000A0;</mml:mtext></mml:math></inline-formula></td>
<td valign="top" align="left"><bold>Burgess</bold>, <xref ref-type="bibr" rid="B8"><bold>2013</bold></xref><bold>; Goldstein et al.</bold>, <xref ref-type="bibr" rid="B23"><bold>2018</bold></xref></td>
</tr>
<tr>
<td/>
<td valign="top" align="left"><italic>where &#x003B8;</italic><sub><italic>x</italic></sub> &#x0003D; <italic>angle</italic>(<italic>X</italic>(<italic>i, t</italic>)), &#x003B8;<sub><italic>y</italic></sub> &#x0003D; <italic>angle</italic>(<italic>Y</italic>(<italic>j, t</italic>))</td>
<td/>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>The table shows a list of connectivity measures/synchrony metrics implemented in the software. The references column lists relevant papers, with hyperscanning studies adopting the metric marked in bold. The equations are adapted from the first paper in the reference list. The result of the equation is one connectivity value r<sub>(i, j)</sub> for electrode pair (i, j) in the N time-point window, computed from analytic signals X(i, t) and Y(j, t), as explained in section Connectivity Analysis</italic>.</p>
</table-wrap-foot>
</table-wrap>
<fig id="F5" position="float">
<label>Figure 5</label>
<caption><p>Connectivity parameters of the Hybrid Harmony GUI. The upper right part of the GUI, i.e., the &#x0201C;Parameters&#x0201D; control parameters for connectivity analysis (section Connectivity Analysis). Input type specifies the type of data (here: EEG). Connectivity Type determines how the connectivity values are averaged across electrode pairs. Connectivity metric determines the calculation of synchrony between two signals. Window size is the length of the data segment to compute synchrony over online. The checkbox sending through OSC determines whether connectivity values are sent through OSC (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, The Data Transfer Protocol: Open Sound Control) in addition to LSL. OSC IP address and OSC port are used to transport data.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0005.tif"/>
</fig>
<p>The metrics, which, as mentioned above, are a subset of those implemented in the hyperscanning analysis pipeline HyPyP (Ayrolles et al., <xref ref-type="bibr" rid="B3">2021</xref>), include two variations of correlation (envelope and power correlation) and two of coherence (coherence and imaginary coherence) measures, which are traditional linear methods to estimate brain connectivity, as well as two measures of phase synchrony (PLV and CCorr). While correlation methods are predominantly employed in hyperscanning fMRI studies to characterize joint action and shared attention (Koike et al., <xref ref-type="bibr" rid="B33">2016</xref>), as shown in the case study below, we have validated power correlation as a neurofeedback synchrony signal. Coherence is more commonly used in fNIRS and EEG hyperscanning studies (Liu et al., <xref ref-type="bibr" rid="B38">2016</xref>; Dikker et al., <xref ref-type="bibr" rid="B16">2017</xref>; Miller et al., <xref ref-type="bibr" rid="B41">2019</xref>). Imaginary coherence, i.e., the imaginary part of the coherence, was developed in response to coherence&#x00027;s susceptibility to zero-lagged spurious synchrony, and has been found to reflect personality traits and social closeness in hyperscanning studies (Nolte et al., <xref ref-type="bibr" rid="B47">2004</xref>; Dikker et al., <xref ref-type="bibr" rid="B13">2021</xref>). For phase synchrony, we included PLV, which has been widely used in hyperscanning studies to capture joint action (Dumas et al., <xref ref-type="bibr" rid="B19">2010</xref>), verbal interaction (Perez Repetto et al., <xref ref-type="bibr" rid="B54">2017</xref>), decision-making (Tang et al., <xref ref-type="bibr" rid="B66">2016</xref>) and other tasks. 
CCorr measures the covariance of phase variance between two data streams and is more robust to coincidental synchrony (Burgess, <xref ref-type="bibr" rid="B8">2013</xref>) compared to PLV, and has been used to investigate touch (Goldstein et al., <xref ref-type="bibr" rid="B23">2018</xref>), learning (Bevilacqua et al., <xref ref-type="bibr" rid="B6">2019</xref>) and language (Perez Repetto et al., <xref ref-type="bibr" rid="B54">2017</xref>) in hyperscanning studies.</p>
<p>In addition to the connectivity metric, users are also able to choose &#x0201C;connectivity type&#x0201D; (<xref ref-type="fig" rid="F3">Figure 3</xref>), which determines the electrode pair combination for connectivity. For each frequency band, the computation is carried out for every possible electrode pair between the participants, and then averaged based on the &#x0201C;connectivity type&#x0201D; parameter. If &#x0201C;connectivity type&#x0201D; is &#x0201C;one-to-one,&#x0201D; only electrode pairs in the matching position are considered (e.g., Fp1 channel of participant A is only paired with Fp1 of participant B and C, etc.); alternatively, if it is set to &#x0201C;all-to-all,&#x0201D; all electrode pairs are considered in the averaging.</p>
<p>Connectivity Analysis outputs data chunks to LSL as a &#x0201C;Marker&#x0201D; stream under the name &#x0201C;Rvalues.&#x0201D; The size of this data chunk depends on the number of subjects and the number of frequency bands chosen for analysis. For example, if there are 4 subjects and 4 frequency bands, the data chunk will be a vector of length 24 (6 combinations of pairs times 4 frequency bands). Additionally, if the checkbox &#x0201C;sending through OSC&#x0201D; is selected, the same data chunks will simultaneously be transmitted through the OSC protocol with parameters in &#x0201C;OSC IP address&#x0201D; and &#x0201C;OSC port.&#x0201D;</p>
</sec>
<sec>
<title>Normalization</title>
<p>As part of Connectivity Analysis, normalization of connectivity values is implemented with two options: manual normalization (labeled as &#x0201C;Manual&#x0201D; in <xref ref-type="fig" rid="F3">Figure 3</xref>) and baselining with a pre-recorded file (labeled as &#x0201C;from file&#x0201D; in <xref ref-type="fig" rid="F3">Figure 3</xref>). With a Min-Max normalization method, the user can use either of the options, or a mixture of both with a weighting factor adjusted by the slider &#x0201C;Weight.&#x0201D; The minimum and maximum limits are then weighted between the &#x0201C;Manual&#x0201D; and &#x0201C;from file&#x0201D; options.</p>
</sec>
<sec>
<title>Visualization and Sonification</title>
<sec>
<title>Visualization</title>
<p>The example visualization protocol provided with the software is based on <italic>Mutual Brainwaves Lab</italic> (<xref ref-type="fig" rid="F6">Figure 6</xref>), described in <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Mutual Brainwaves Lab. The visualization app was originally built in C&#x0002B;&#x0002B; using the OpenFramework toolkit (<ext-link ext-link-type="uri" xlink:href="https://openframeworks.cc/">https://openframeworks.cc/</ext-link>), a general-purpose framework that wraps together several libraries to assist the creative process. An updated Python3 version makes it easier to deploy the application on different OS (Windows, MacOS, Linux) and more maintainable and extendable for developers, thanks to Python&#x00027;s more accessible syntax and a wide choice of libraries. The application relies on an OSC plugin that listens in real-time for an OSC sender over the network. The GUI of the application is built using OpenFramework for the C&#x0002B;&#x0002B; version and PyQt5 (<ext-link ext-link-type="uri" xlink:href="https://pypi.org/project/PyQt5/">https://pypi.org/project/PyQt5/</ext-link>) for the Python version. When launched, it presents two avatars representing human heads with a brain icon, and a menu with various options to parameterize the interface. As soon as the OSC receiver starts receiving a stream of data from the Hybrid Harmony running on the same network, the application translates the inter-brain synchrony as the distance between the avatar heads. The user can also set up sessions where participants are encouraged to &#x0201C;score&#x0201D; higher synchrony in a limited timeframe.</p>
<fig id="F6" position="float">
<label>Figure 6</label>
<caption><p>Visualization schematic. Connectivity values are visualized as the distance between the two merging heads using the visualization module.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0006.tif"/>
</fig>
</sec>
<sec>
<title>Sonification</title>
<p>Sonification of EEG data has been explored in projects such as EEGsynth (<ext-link ext-link-type="uri" xlink:href="https://github.com/eegsynth/eegsynth">https://github.com/eegsynth/eegsynth</ext-link>), which interfaces electrophysiological recordings (e.g., EEG, EMG and ECG) with analog synthesizers and digital devices. Here, we demonstrate that Hybrid Harmony can be easily interfaced with a digital audio workstation (DAW) in real-time through an OSC (<xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, The Data Transfer Protocol: Open Sound Control) plugin, allowing for the control of audio parameters based on the connectivity values sent through OSC. We describe the protocol for the control of Ableton Live via LiveGrabber (<ext-link ext-link-type="uri" xlink:href="https://www.showsync.com/tools&#x00023;livegrabber">https://www.showsync.com/tools&#x00023;livegrabber</ext-link>), a set of free Ableton plugins. LiveGrabber receives messages from any OSC sender on the network and uses OSC messages to control track parameters in Ableton.</p>
<p>After specifying the OSC IP address and port in the Hybrid Harmony GUI, the output can be received by the GrabberReceiver plugin in Ableton (part of the LiveGrabber package). The TrackGrabber plugin allows for the control of track parameters (such as volume, reverb, panning, etc.) using the output from Hybrid Harmony in real time. To illustrate a simple sonification example, we have created a soundscape in which the volume of certain musical pitches can be modulated to create alternating moments of <italic>dissonance</italic> (harmonic tension or unpleasant sounding chords) and <italic>consonance</italic> (harmonic resolution or pleasant, stable chords). The volume of each pitch is directly controlled by connectivity parameters output through OSC, such that greater connectivity values (moments of increased interpersonal synchrony) correspond to more pleasant, stable sounding chords.</p>
</sec>
</sec>
</sec>
<sec id="s3">
<title>Validation</title>
<p>To validate that Hybrid Harmony can capture socially relevant self-report measures, we used a dataset of 243 dyads participating in the Mutual Wave Machine (<xref ref-type="fig" rid="F7">Figure 7</xref>; see <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>, Case study: The Mutual Wave Machine for details), during which real-time envelope correlations were recorded and translated into light patterns projected onto the surface of two spheres (Hybrid Harmony parameters: &#x0201C;Frequency bands for analysis&#x0201D;: delta 1&#x02013;4 Hz, theta 4&#x02013;8 Hz, alpha 8&#x02013;12 Hz, beta 12&#x02013;20 Hz; &#x0201C;Connectivity type&#x0201D;: one-to-one; &#x0201C;Connectivity metric&#x0201D;: envelope correlation; &#x0201C;Window size&#x0201D;: 3 s).</p>
<fig id="F7" position="float">
<label>Figure 7</label>
<caption><p>The Mutual Wave Machine using inter-brain envelope correlations with Hybrid Harmony. <bold>(A)</bold> Inter-brain correlations between two participants wearing wireless EEG headsets were computed in real time. <bold>(B)</bold> Higher inter-brain correlation values correspond to more light projected on each of the surfaces, with the focus point behind each participant&#x00027;s head.</p></caption>
<graphic xlink:href="fnrgo-02-687108-g0007.tif"/>
</fig>
<p>We first asked whether the average envelope correlation reflected dyads&#x00027; personal distress (Davis, <xref ref-type="bibr" rid="B12">1980</xref>), building on past research where we consistently find a negative relationship between personal distress and inter-brain synchrony (Dikker et al., <xref ref-type="bibr" rid="B16">2017</xref>, <xref ref-type="bibr" rid="B13">2021</xref>; Chen et al., <xref ref-type="bibr" rid="B9">2021</xref>; Reinero et al., <xref ref-type="bibr" rid="B60">2021</xref>). Indeed, we find that pairs&#x00027; average personal distress was negatively correlated with their neurofeedback synchrony in theta [<italic>r</italic><sub>(236)</sub> = &#x02212;0.182, pFDR = 0.010], alpha [<italic>r</italic><sub>(236)</sub> = &#x02212;0.204, pFDR = 0.004] and beta [<italic>r</italic><sub>(236)</sub> = &#x02212;0.178, pFDR = 0.010].</p>
<p>We then asked whether social closeness (Aron et al., <xref ref-type="bibr" rid="B2">1992</xref>) was positively related to pairs&#x00027; envelope correlations. We reasoned that this is an interpersonal state measure that might be the target for a social neurofeedback intervention. Indeed, we find significant positive correlations between social closeness and pairs&#x00027; neurofeedback synchrony in the alpha [<italic>r</italic><sub>(236)</sub> = 0.264, pFDR &#x0003C;0.001] and beta bands [<italic>r</italic><sub>(236)</sub> = 0.210, pFDR = 0.004].</p>
<p>While these results do not speak to the efficacy of Hybrid Harmony as a social neurofeedback tool, they confirm a very important first step, namely that the neurofeedback output is correlated with socially relevant features that might be the target for neurofeedback interventions.</p>
</sec>
<sec sec-type="discussion" id="s4">
<title>Discussion</title>
<p>We describe Hybrid Harmony, an open-source software that allows researchers to explore interpersonal synchrony in a plug-and-play setup. The project builds on previous work from our group suggesting that incorporating synchrony neurofeedback in naturalistic social interactions may help increase synchrony and interpersonal connectedness, as such raising the possibility that biofeedback may constitute a useful tool to explore meaningful features of social interaction (Dikker et al., <xref ref-type="bibr" rid="B14">2019</xref>, <xref ref-type="bibr" rid="B13">2021</xref>).</p>
<p>As discussed in the Introduction, inter-brain synchrony has been shown to correlate with a range of personal and social characteristics and behaviors, underscoring its relevance in understanding naturalistic social interactions. Interpersonal biofeedback approaches may make it possible to more precisely map such social and psychological factors onto specific neurophysiological processes. For example, testing different types of synchrony in a neurofeedback environment might help inform the field about which metric is the most indicative of social behavior and outcomes in which social contexts. Dyads may be more responsive to multi-brain neurofeedback based on, say, coherence during collaborative tasks, but more responsive to, say, envelope correlations during social sharing. Future findings of such a nature will enrich our knowledge about the social relevance of these metrics and constitute a non-invasive way to probe possible causal links between inter-brain synchrony and social behavior (Moreau and Dumas, <xref ref-type="bibr" rid="B43">2021</xref>; Novembre and Iannetti, <xref ref-type="bibr" rid="B48">2021</xref>). Here we show in a dataset of 243 dyads that social closeness and affective traits can, in fact, be reflected in online synchrony neurofeedback measures, which is an important first step in this direction.</p>
<sec>
<title>Future Directions and Challenges</title>
<p>Beyond inter-brain coupling, interpersonal synchrony has been examined in more depth in other aspects of behavior, including movement (Oullier et al., <xref ref-type="bibr" rid="B52">2008</xref>; van Ulzen et al., <xref ref-type="bibr" rid="B70">2008</xref>; Varlet et al., <xref ref-type="bibr" rid="B71">2011</xref>), language (Pickering and Garrod, <xref ref-type="bibr" rid="B56">2004</xref>) and physiological rhythms such as heart rate and respiration (M&#x000FC;ller and Lindenberger, <xref ref-type="bibr" rid="B45">2011</xref>; Noy et al., <xref ref-type="bibr" rid="B49">2015</xref>). As described above, movement and physiological synchrony may be both cause and effect of inter-brain coupling. In future iterations of Hybrid Harmony, we hope to extend the software to incorporate multiple data streams including physiological and movement data. This would allow users to compare the social relevance of various forms of synchrony, and possibly to tease apart interrelationships between (neuro)physiological and behavioral coupling (e.g., Dumas et al., <xref ref-type="bibr" rid="B19">2010</xref>; Mayo and Gordon, <xref ref-type="bibr" rid="B39">2020</xref>; Pan et al., <xref ref-type="bibr" rid="B53">2020</xref>). Similarly, while we prioritized EEG research in Hybrid Harmony, given the increasingly rich fNIRS hyperscanning literature (Liu et al., <xref ref-type="bibr" rid="B38">2016</xref>; Nozawa et al., <xref ref-type="bibr" rid="B50">2016</xref>; Miller et al., <xref ref-type="bibr" rid="B41">2019</xref>) and recent successful work in fNIRS neurofeedback (Gvirts and Perlmutter, <xref ref-type="bibr" rid="B26">2020</xref>; Kohl et al., <xref ref-type="bibr" rid="B31">2020</xref>) we believe an extension of Hybrid Harmony to include metrics suitable for fNIRS data would be a very welcome and fruitful future direction for the software.</p>
<p>EEG systems&#x00027; susceptibility to movements and environmental noises can greatly compromise data quality and introduce spurious synchrony in our measure. In controlled lab studies, motion artifacts are often carefully removed manually and through data decomposition (e.g., principal component analysis). In the neurofeedback setting, however, such procedures haven&#x00027;t been widely implemented. In our practice, we tried to address the issue empirically with several solutions. For example, we piloted a version of the Mutual Wave Machine where sudden motion-related fluctuations in the data were removed, but this dramatically influenced the experience: participants often react enthusiastically to a sudden increase in light, only to get &#x0201C;punished&#x0201D; for facial expression, which would discourage them from naturally engaging with each other. We considered patching the data with correlations from non-contaminated stretches of data, but this would lead to arbitrary choices. We therefore instead opted for an alternative solution where participants were told explicitly that because extensive head and facial movements can dramatically affect the EEG signal, what they were seeing could also be caused by synchronous noise or synchronous movement. While this option sufficed for the experiential side of the neurofeedback, it is suboptimal with regard to data fidelity. In future releases of the software, we will incorporate support for online data cleaning procedures such as toolboxes (Mullen et al., <xref ref-type="bibr" rid="B44">2015</xref>) and EEG systems that provide built-in data cleaning options in their software, e.g., the SMARTING system by mBrainTrain (Lee et al., <xref ref-type="bibr" rid="B37">2020</xref>).</p>
<p>In addition to challenges related to data cleaning, the real-time nature of the analysis procedure poses challenges in terms of its interpretation. For instance, data fidelity is much higher when applying filtering and correlation analysis on larger stretches of data, but in the type of analysis employed here, this would compromise the immediacy of the neurofeedback. Therefore, it is important to note that our real-time approach might not be able to characterize those types of synchrony that may not be temporally aligned over short intervals. For example, fMRI studies have suggested there may be delays of up to 8 seconds in inter-brain synchrony between speakers and listeners (Stephens et al., <xref ref-type="bibr" rid="B64">2010a</xref>; Dikker et al., <xref ref-type="bibr" rid="B15">2014</xref>; Misaki et al., <xref ref-type="bibr" rid="B42">2021</xref>). Indeed, while successful joint action is typically associated with the coupling of motor movements (Dumas et al., <xref ref-type="bibr" rid="B19">2010</xref>), being &#x0201C;in sync&#x0201D; or &#x0201C;on the same wavelength&#x0201D; is often taken to imply interactive alignment at the level of mental representations (Garrod and Pickering, <xref ref-type="bibr" rid="B22">2009</xref>; Pickering and Garrod, <xref ref-type="bibr" rid="B57">2013</xref>), usually involving more &#x0201C;abstract&#x0201D; constructs such as sharing viewpoints (Van Berkum et al., <xref ref-type="bibr" rid="B68">2009</xref>). These mental representations may or may not be linked to convergence at the temporal level. 
In line with this dissociation, in two instances of the Mutual Wave Machine we asked participants to reflect on their &#x0201C;connection strategies.&#x0201D; Pairs who used either eye contact or joint action as a connection strategy (mimicry, laughter, motion coordination) exhibited an increase in inter-brain synchrony over time as measured by Imaginary Coherence and Projected Power Correlation (Dikker et al., <xref ref-type="bibr" rid="B13">2021</xref>). Such an increase in synchrony was not observed for pairs who tried &#x0201C;thinking about the same thing.&#x0201D; While these results validated our approach in capturing synchrony in joint action, they do not exclude the possibility of synchrony at the level of abstract mental representations, given that such synchrony may entail more complex temporal dynamics.</p>
<p>In future iterations, we hope to incorporate alternatives to the time-aligned approach, such as introducing a temporal delay between the data streams and real-time adaptive normalization. These additions will hopefully increase sensitivity to endogenous synchronizers and facilitate more controlled experimental designs (Stephens et al., <xref ref-type="bibr" rid="B65">2010b</xref>).</p>
<p>Finally, it is worth reiterating that very little is known about the correspondence between different synchrony analysis metrics and socio-psychologically relevant factors. Although some metrics are better than others in theory (e.g., CCorr is more robust to spurious synchrony than PLV), and some are more common in hyperscanning studies than single brain studies (e.g., phase synchrony is more common than coherence/correlation), the exact pros and cons of each metric require further investigation. As more becomes known about the mapping between psychological processes and inter-brain synchrony metrics, we will add guidance for users with respect to the choice of metrics in different situations.</p>
</sec>
</sec>
<sec sec-type="conclusions" id="s5">
<title>Conclusion</title>
<p>In this study, we describe the background, functionality, and validation of Hybrid Harmony, a multi-person neurofeedback application for interpersonal synchrony. With its user-friendly interface and flexible design, Hybrid Harmony enables researchers to explore the interplay between synchrony as a computational method and the various psychological, cognitive, and social functions potentially associated with it.</p>
</sec>
<sec sec-type="data-availability-statement" id="s6">
<title>Data Availability Statement</title>
<p>The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found at: <ext-link ext-link-type="uri" xlink:href="https://github.com/RhythmsOfRelating">https://github.com/RhythmsOfRelating</ext-link>.</p>
</sec>
<sec id="s7">
<title>Ethics Statement</title>
<p>Ethical review and approval was not required for the study on human participants in accordance with the local legislation and institutional requirements. Written informed consent to participate in this study was provided by the participants&#x00027; legal guardian/next of kin. Written informed consent was obtained from the individual(s), and minor(s)&#x00027; legal guardian/next of kin, for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec id="s8">
<title>Author Contributions</title>
<p>SD, MO, and PC contributed to conception and design of the study. MO, DM, SH, MR, GD, and PC developed the software. PC performed the statistical analysis. PC, SD, SH, MR, KS, DM, and GD wrote sections of the manuscript. All authors contributed to manuscript revision, read, and approved the submitted version.</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>DM is the co-director of Diademics Pty Ltd company. The handling Editor declared a shared affiliation, though no other collaboration, with one of the authors MR. The remaining authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s9">
<title>Publisher&#x00027;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
</body>
<back>
<ack><p>We thank Brett Bayes, Laura Gwilliams, and Jean Jacques Warmerdham for their input in the project; and Brain Products, EMOTIV, MUSE, and mBrain Train for their continued support of our work.</p>
</ack>
<sec sec-type="supplementary-material" id="s10">
<title>Supplementary Material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fnrgo.2021.687108/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fnrgo.2021.687108/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.pdf" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abe</surname> <given-names>M. O.</given-names></name> <name><surname>Koike</surname> <given-names>T.</given-names></name> <name><surname>Okazaki</surname> <given-names>S.</given-names></name> <name><surname>Sugawara</surname> <given-names>S. K.</given-names></name> <name><surname>Takahashi</surname> <given-names>K.</given-names></name> <name><surname>Watanabe</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Neural correlates of online cooperation during joint force production</article-title>. <source>Neuroimage</source> <volume>191</volume>, <fpage>150</fpage>&#x02013;<lpage>161</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2019.02.003</pub-id><pub-id pub-id-type="pmid">30739061</pub-id></citation></ref>
<ref id="B2">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aron</surname> <given-names>A.</given-names></name> <name><surname>Aron</surname> <given-names>E. N.</given-names></name> <name><surname>Smollan</surname> <given-names>D.</given-names></name></person-group> (<year>1992</year>). <article-title>Inclusion of other in the self scale and the structure of interpersonal closeness</article-title>. <source>J. Pers. Soc. Psychol</source>. <volume>63</volume>, <fpage>596</fpage>&#x02013;<lpage>612</lpage>. <pub-id pub-id-type="doi">10.1037/0022-3514.63.4.596</pub-id></citation>
</ref>
<ref id="B3">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ayrolles</surname> <given-names>A.</given-names></name> <name><surname>Brun</surname> <given-names>F.</given-names></name> <name><surname>Chen</surname> <given-names>P.</given-names></name> <name><surname>Djalovski</surname> <given-names>A.</given-names></name> <name><surname>Beauxis</surname> <given-names>Y.</given-names></name> <name><surname>Delorme</surname> <given-names>R.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>HyPyP: a hyperscanning python pipeline for inter-brain connectivity analysis</article-title>. <source>Soc. Cogn. Affect. Neurosci</source>. <volume>16</volume>, <fpage>72</fpage>&#x02013;<lpage>83</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsaa141</pub-id><pub-id pub-id-type="pmid">33031496</pub-id></citation></ref>
<ref id="B4">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Babiloni</surname> <given-names>F.</given-names></name> <name><surname>Astolfi</surname> <given-names>L.</given-names></name></person-group> (<year>2014</year>). <article-title>Social neuroscience and hyperscanning techniques: past, present and future</article-title>. <source>Neurosci. Biobehav. Rev</source>. <volume>44</volume>, <fpage>76</fpage>&#x02013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1016/j.neubiorev.2012.07.006</pub-id><pub-id pub-id-type="pmid">22917915</pub-id></citation></ref>
<ref id="B5">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Babiloni</surname> <given-names>F.</given-names></name> <name><surname>Cincotti</surname> <given-names>F.</given-names></name> <name><surname>Mattia</surname> <given-names>D.</given-names></name> <name><surname>Mattiocco</surname> <given-names>M.</given-names></name> <name><surname>Bufalari</surname> <given-names>S.</given-names></name> <name><surname>De Vico Fallani</surname> <given-names>F.</given-names></name> <etal/></person-group>. (<year>2006</year>). <article-title>Neural basis for the brain responses to the marketing messages: an high resolution EEG study</article-title>, in <source>Conference Proceedings: Annual International Conference of the IEEE Engineering in Medicine and Biology Society. IEEE Engineering in Medicine and Biology Society. Conference</source>, <publisher-loc>New York, NY</publisher-loc>, <fpage>3676</fpage>&#x02013;<lpage>3679</lpage>.<pub-id pub-id-type="pmid">17946577</pub-id></citation></ref>
<ref id="B6">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bevilacqua</surname> <given-names>D.</given-names></name> <name><surname>Davidesco</surname> <given-names>I.</given-names></name> <name><surname>Wan</surname> <given-names>L.</given-names></name> <name><surname>Chaloner</surname> <given-names>K.</given-names></name> <name><surname>Rowland</surname> <given-names>J.</given-names></name> <name><surname>Ding</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Brain-to-brain synchrony and learning outcomes vary by student&#x02013;teacher dynamics: evidence from a real-world classroom electroencephalography study</article-title>. <source>J. Cogn. Neurosci</source>. <volume>31</volume>, <fpage>401</fpage>&#x02013;<lpage>411</lpage>. <pub-id pub-id-type="doi">10.1162/jocn_a_01274</pub-id><pub-id pub-id-type="pmid">29708820</pub-id></citation></ref>
<ref id="B7">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bhayee</surname> <given-names>S.</given-names></name> <name><surname>Tomaszewski</surname> <given-names>P.</given-names></name> <name><surname>Lee</surname> <given-names>D. H.</given-names></name> <name><surname>Moffat</surname> <given-names>G.</given-names></name> <name><surname>Pino</surname> <given-names>L.</given-names></name> <name><surname>Moreno</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2016</year>). <article-title>Attentional and affective consequences of technology supported mindfulness training: a randomised, active control, efficacy trial</article-title>. <source>BMC Psychol</source>. <volume>4</volume>:<fpage>60</fpage>. <pub-id pub-id-type="doi">10.1186/s40359-016-0168-6</pub-id><pub-id pub-id-type="pmid">27894358</pub-id></citation></ref>
<ref id="B8">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burgess</surname> <given-names>A. P.</given-names></name></person-group> (<year>2013</year>). <article-title>On the interpretation of synchronization in EEG hyperscanning studies: a cautionary note</article-title>. <source>Front. Hum. Neurosci</source>. <volume>7</volume>:<fpage>881</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2013.00881</pub-id><pub-id pub-id-type="pmid">24399948</pub-id></citation></ref>
<ref id="B9">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>P.</given-names></name> <name><surname>Kirk</surname> <given-names>U.</given-names></name> <name><surname>Dikker</surname> <given-names>S.</given-names></name></person-group> (<year>2021</year>). <article-title>Trait mindfulness predicts inter-brain coupling during naturalistic face-to-face interactions</article-title>. <source>bioRxiv [Preprint]</source>. <pub-id pub-id-type="doi">10.1101/2021.06.28.448432</pub-id></citation>
</ref>
<ref id="B10">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Clerico</surname> <given-names>A.</given-names></name> <name><surname>Gupta</surname> <given-names>R.</given-names></name> <name><surname>Falk</surname> <given-names>T. H.</given-names></name></person-group> (<year>2015</year>). <article-title>Mutual information between inter-hemispheric EEG spectro-temporal patterns: a new feature for automated affect recognition</article-title>, in <source>2015 7th International IEEE/EMBS Conference on Neural Engineering (NER)</source>, <volume>Montpellier</volume>, <fpage>914</fpage>&#x02013;<lpage>917</lpage>.</citation>
</ref>
<ref id="B11">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Czeszumski</surname> <given-names>A.</given-names></name> <name><surname>Eustergerling</surname> <given-names>S.</given-names></name> <name><surname>Lang</surname> <given-names>A.</given-names></name> <name><surname>Menrath</surname> <given-names>D.</given-names></name> <name><surname>Gerstenberger</surname> <given-names>M.</given-names></name> <name><surname>Schuberth</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Hyperscanning: a valid method to study neural inter-brain underpinnings of social interaction</article-title>. <source>Front. Hum. Neurosci</source>. <volume>14</volume>:<fpage>39</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2020.00039</pub-id><pub-id pub-id-type="pmid">32180710</pub-id></citation></ref>
<ref id="B12">
<citation citation-type="web"><person-group person-group-type="author"><name><surname>Davis</surname> <given-names>M. H.</given-names></name></person-group> (<year>1980</year>). <source>A Multidimensional Approach to Individual Differences in Empathy</source>. Available online at: <ext-link ext-link-type="uri" xlink:href="https://www.uv.es/friasnav/Davis_1980.pdf">https://www.uv.es/friasnav/Davis_1980.pdf</ext-link> (accessed March 29, 2021).</citation>
</ref>
<ref id="B13">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Michalareas</surname> <given-names>G.</given-names></name> <name><surname>Oostrik</surname> <given-names>M.</given-names></name> <name><surname>Serafimaki</surname> <given-names>A.</given-names></name> <name><surname>Kahraman</surname> <given-names>H. M.</given-names></name> <name><surname>Struiksma</surname> <given-names>M. E.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Crowdsourcing neuroscience: inter-brain coupling during face-to-face interactions outside the laboratory</article-title>. <source>Neuroimage</source> <volume>227</volume>:<fpage>117436</fpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.117436</pub-id><pub-id pub-id-type="pmid">33039619</pub-id></citation></ref>
<ref id="B14">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Montgomery</surname> <given-names>S.</given-names></name> <name><surname>Tunca</surname> <given-names>S.</given-names></name></person-group> (<year>2019</year>). <article-title>Using synchrony-based neurofeedback in search of human connectedness</article-title>, in <source>Brain Art: Brain-Computer Interfaces for Artistic Expression</source>, ed <person-group person-group-type="editor"><name><surname>Nijholt</surname> <given-names>A.</given-names></name></person-group> (<publisher-name>Springer International Publishing</publisher-name>), <fpage>161</fpage>&#x02013;<lpage>206</lpage>.</citation>
</ref>
<ref id="B15">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Silbert</surname> <given-names>L. J.</given-names></name> <name><surname>Hasson</surname> <given-names>U.</given-names></name> <name><surname>Zevin</surname> <given-names>J. D.</given-names></name></person-group> (<year>2014</year>). <article-title>On the same wavelength: predictable language enhances speaker&#x02013;listener brain-to-brain synchrony in posterior superior temporal gyrus</article-title>. <source>J. Neurosci</source>. <volume>34</volume>, <fpage>6267</fpage>&#x02013;<lpage>6272</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.3796-13.2014</pub-id><pub-id pub-id-type="pmid">24790197</pub-id></citation></ref>
<ref id="B16">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Wan</surname> <given-names>L.</given-names></name> <name><surname>Davidesco</surname> <given-names>I.</given-names></name> <name><surname>Kaggen</surname> <given-names>L.</given-names></name> <name><surname>Oostrik</surname> <given-names>M.</given-names></name> <name><surname>McClintock</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>Brain-to-brain synchrony tracks real-world dynamic group interactions in the classroom</article-title>. <source>Curr. Biol</source>. <volume>27</volume>, <fpage>1375</fpage>&#x02013;<lpage>1380</lpage>. <pub-id pub-id-type="doi">10.1016/j.cub.2017.04.002</pub-id><pub-id pub-id-type="pmid">29209185</pub-id></citation></ref>
<ref id="B17">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Duan</surname> <given-names>L.</given-names></name> <name><surname>Liu</surname> <given-names>W.-J.</given-names></name> <name><surname>Dai</surname> <given-names>R.-N.</given-names></name> <name><surname>Li</surname> <given-names>R.</given-names></name> <name><surname>Lu</surname> <given-names>C.-M.</given-names></name> <name><surname>Huang</surname> <given-names>Y.-X.</given-names></name> <etal/></person-group>. (<year>2013</year>). <article-title>Cross-brain neurofeedback: scientific concept and experimental platform</article-title>. <source>PLoS ONE</source> <volume>8</volume>:<fpage>e64590</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0064590</pub-id><pub-id pub-id-type="pmid">23691253</pub-id></citation></ref>
<ref id="B18">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dumas</surname> <given-names>G.</given-names></name> <name><surname>Fairhurst</surname> <given-names>M. T.</given-names></name></person-group> (<year>2021</year>). <article-title>Reciprocity and alignment: quantifying coupling in dynamic interactions</article-title>. <source>R. Soc. Open Sci</source>. <volume>8</volume>:<fpage>210138</fpage>. <pub-id pub-id-type="doi">10.1098/rsos.210138</pub-id><pub-id pub-id-type="pmid">34040790</pub-id></citation></ref>
<ref id="B19">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dumas</surname> <given-names>G.</given-names></name> <name><surname>Nadel</surname> <given-names>J.</given-names></name> <name><surname>Soussignan</surname> <given-names>R.</given-names></name> <name><surname>Martinerie</surname> <given-names>J.</given-names></name> <name><surname>Garnero</surname> <given-names>L.</given-names></name></person-group> (<year>2010</year>). <article-title>Inter-brain synchronization during social interaction</article-title>. <source>PLoS ONE</source> <volume>5</volume>:<fpage>e12166</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0012166</pub-id><pub-id pub-id-type="pmid">20808907</pub-id></citation></ref>
<ref id="B20">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fang</surname> <given-names>Z.</given-names></name> <name><surname>Ray</surname> <given-names>L. B.</given-names></name> <name><surname>Owen</surname> <given-names>A. M.</given-names></name> <name><surname>Fogel</surname> <given-names>S. M.</given-names></name></person-group> (<year>2019</year>). <article-title>Brain activation time-locked to sleep spindles associated with human cognitive abilities</article-title>. <source>Front. Neurosci</source>. <volume>13</volume>:<fpage>46</fpage>. <pub-id pub-id-type="doi">10.3389/fnins.2019.00046</pub-id><pub-id pub-id-type="pmid">30787863</pub-id></citation></ref>
<ref id="B21">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fazel-Rezai</surname> <given-names>R.</given-names></name> <name><surname>Allison</surname> <given-names>B. Z.</given-names></name> <name><surname>Guger</surname> <given-names>C.</given-names></name> <name><surname>Sellers</surname> <given-names>E. W.</given-names></name> <name><surname>Kleih</surname> <given-names>S. C.</given-names></name> <name><surname>K&#x000FC;bler</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>P300 brain computer interface: current challenges and emerging trends</article-title>. <source>Front. Neuroeng</source>. <volume>5</volume>:<fpage>14</fpage>. <pub-id pub-id-type="doi">10.3389/fneng.2012.00014</pub-id><pub-id pub-id-type="pmid">22822397</pub-id></citation></ref>
<ref id="B22">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Garrod</surname> <given-names>S.</given-names></name> <name><surname>Pickering</surname> <given-names>M. J.</given-names></name></person-group> (<year>2009</year>). <article-title>Joint action, interactive alignment, and dialog</article-title>. <source>Top. Cogn. Sci</source>. <volume>1</volume>, <fpage>292</fpage>&#x02013;<lpage>304</lpage>. <pub-id pub-id-type="doi">10.1111/j.1756-8765.2009.01020.x</pub-id><pub-id pub-id-type="pmid">25164934</pub-id></citation></ref>
<ref id="B23">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Goldstein</surname> <given-names>P.</given-names></name> <name><surname>Weissman-Fogel</surname> <given-names>I.</given-names></name> <name><surname>Dumas</surname> <given-names>G.</given-names></name> <name><surname>Shamay-Tsoory</surname> <given-names>S. G.</given-names></name></person-group> (<year>2018</year>). <article-title>Brain-to-brain coupling during handholding is associated with pain reduction</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>115</volume>, <fpage>E2528</fpage>&#x02013;<lpage>E2537</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1703643115</pub-id><pub-id pub-id-type="pmid">29483250</pub-id></citation></ref>
<ref id="B24">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grennan</surname> <given-names>G.</given-names></name> <name><surname>Balasubramani</surname> <given-names>P. P.</given-names></name> <name><surname>Alim</surname> <given-names>F.</given-names></name> <name><surname>Zafar-Khan</surname> <given-names>M.</given-names></name> <name><surname>Lee</surname> <given-names>E. E.</given-names></name> <name><surname>Jeste</surname> <given-names>D. V.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Cognitive and neural correlates of loneliness and wisdom during emotional bias</article-title>. <source>Cereb. Cortex</source> <volume>31</volume>, <fpage>3311</fpage>&#x02013;<lpage>3322</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhab012</pub-id><pub-id pub-id-type="pmid">33687437</pub-id></citation></ref>
<ref id="B25">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Guevara</surname> <given-names>M. A.</given-names></name> <name><surname>Corsi-Cabrera</surname> <given-names>M.</given-names></name></person-group> (<year>1996</year>). <article-title>EEG coherence or EEG correlation?</article-title> <source>Int. J. Psychophysiol</source>. <volume>23</volume>, <fpage>145</fpage>&#x02013;<lpage>153</lpage>. <pub-id pub-id-type="doi">10.1016/S0167-8760(96)00038-4</pub-id><pub-id pub-id-type="pmid">8947780</pub-id></citation></ref>
<ref id="B26">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gvirts</surname> <given-names>H. Z.</given-names></name> <name><surname>Perlmutter</surname> <given-names>R.</given-names></name></person-group> (<year>2020</year>). <article-title>What guides us to neurally and behaviorally align with anyone specific? A neurobiological model based on fNIRS hyperscanning studies</article-title>. <source>Neuroscientist</source> <volume>26</volume>, <fpage>108</fpage>&#x02013;<lpage>116</lpage>. <pub-id pub-id-type="doi">10.1177/1073858419861912</pub-id><pub-id pub-id-type="pmid">31296135</pub-id></citation></ref>
<ref id="B27">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hari</surname> <given-names>R.</given-names></name> <name><surname>Himberg</surname> <given-names>T.</given-names></name> <name><surname>Nummenmaa</surname> <given-names>L.</given-names></name> <name><surname>H&#x000E4;m&#x000E4;l&#x000E4;inen</surname> <given-names>M.</given-names></name> <name><surname>Parkkonen</surname> <given-names>L.</given-names></name></person-group> (<year>2013</year>). <article-title>Synchrony of brains and bodies during implicit interpersonal interaction</article-title>. <source>Trends Cogn. Sci</source>. <volume>17</volume>, <fpage>105</fpage>&#x02013;<lpage>106</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2013.01.003</pub-id><pub-id pub-id-type="pmid">23384658</pub-id></citation></ref>
<ref id="B28">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hasson</surname> <given-names>U.</given-names></name></person-group> (<year>2004</year>). <article-title>Intersubject synchronization of cortical activity during natural vision</article-title>. <source>Science</source> <volume>303</volume>, <fpage>1634</fpage>&#x02013;<lpage>1640</lpage>. <pub-id pub-id-type="doi">10.1126/science.1089506</pub-id><pub-id pub-id-type="pmid">15016991</pub-id></citation></ref>
<ref id="B29">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hoehl</surname> <given-names>S.</given-names></name> <name><surname>Fairhurst</surname> <given-names>M.</given-names></name> <name><surname>Schirmer</surname> <given-names>A.</given-names></name></person-group> (<year>2021</year>). <article-title>Interactional synchrony: signals, mechanisms and benefits</article-title>. <source>Soc. Cogn. Affect. Neurosci</source>. <volume>16</volume>, <fpage>5</fpage>&#x02013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsaa024</pub-id><pub-id pub-id-type="pmid">32128587</pub-id></citation></ref>
<ref id="B30">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kinreich</surname> <given-names>S.</given-names></name> <name><surname>Djalovski</surname> <given-names>A.</given-names></name> <name><surname>Kraus</surname> <given-names>L.</given-names></name> <name><surname>Louzoun</surname> <given-names>Y.</given-names></name> <name><surname>Feldman</surname> <given-names>R.</given-names></name></person-group> (<year>2017</year>). <article-title>Brain-to-brain synchrony during naturalistic social interactions</article-title>. <source>Sci. Rep</source>. <volume>7</volume>:<fpage>17060</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-017-17339-5</pub-id><pub-id pub-id-type="pmid">29213107</pub-id></citation></ref>
<ref id="B31">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kohl</surname> <given-names>S. H.</given-names></name> <name><surname>Mehler</surname> <given-names>D. M. A.</given-names></name> <name><surname>L&#x000FC;hrs</surname> <given-names>M.</given-names></name> <name><surname>Thibault</surname> <given-names>R. T.</given-names></name> <name><surname>Konrad</surname> <given-names>K.</given-names></name> <name><surname>Sorger</surname> <given-names>B.</given-names></name></person-group> (<year>2020</year>). <article-title>The potential of functional near-infrared spectroscopy-based neurofeedback &#x02013; a systematic review and recommendations for best practice</article-title>. <source>Front. Neurosci.</source> <volume>14</volume>:<fpage>594</fpage>. <pub-id pub-id-type="doi">10.3389/fnins.2020.00594</pub-id><pub-id pub-id-type="pmid">32848528</pub-id></citation></ref>
<ref id="B32">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Koike</surname> <given-names>T.</given-names></name> <name><surname>Sumiya</surname> <given-names>M.</given-names></name> <name><surname>Nakagawa</surname> <given-names>E.</given-names></name> <name><surname>Okazaki</surname> <given-names>S.</given-names></name> <name><surname>Sadato</surname> <given-names>N.</given-names></name></person-group> (<year>2019</year>). <article-title>What makes eye contact special? Neural substrates of on-line mutual eye-gaze: a hyperscanning fMRI study</article-title>. <source>eNeuro</source> <volume>6</volume>, <fpage>401</fpage>&#x02013;<lpage>412</lpage>. <pub-id pub-id-type="doi">10.1523/ENEURO.0284-18.2019</pub-id><pub-id pub-id-type="pmid">30834300</pub-id></citation></ref>
<ref id="B33">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Koike</surname> <given-names>T.</given-names></name> <name><surname>Tanabe</surname> <given-names>H. C.</given-names></name> <name><surname>Okazaki</surname> <given-names>S.</given-names></name> <name><surname>Nakagawa</surname> <given-names>E.</given-names></name> <name><surname>Sasaki</surname> <given-names>A. T.</given-names></name> <name><surname>Shimada</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2016</year>). <article-title>Neural substrates of shared attention as social memory: a hyperscanning functional magnetic resonance imaging study</article-title>. <source>Neuroimage</source> <volume>125</volume>, <fpage>401</fpage>&#x02013;<lpage>412</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2015.09.076</pub-id><pub-id pub-id-type="pmid">26514295</pub-id></citation></ref>
<ref id="B34">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kovacevic</surname> <given-names>N.</given-names></name> <name><surname>Ritter</surname> <given-names>P.</given-names></name> <name><surname>Tays</surname> <given-names>W.</given-names></name> <name><surname>Moreno</surname> <given-names>S.</given-names></name> <name><surname>McIntosh</surname> <given-names>A. R.</given-names></name></person-group> (<year>2015</year>). <article-title>&#x0201C;My virtual dream&#x0201D;: collective neurofeedback in an immersive art environment</article-title>. <source>PLoS ONE</source> <volume>10</volume>:<fpage>e0130129</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0130129</pub-id><pub-id pub-id-type="pmid">26154513</pub-id></citation></ref>
<ref id="B35">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kovach</surname> <given-names>C. K.</given-names></name></person-group> (<year>2017</year>). <article-title>A biased look at phase locking: brief critical review and proposed remedy</article-title>. <source>IEEE Transac. Signal Proc</source>. <volume>65</volume>, <fpage>4468</fpage>&#x02013;<lpage>4480</lpage>. <pub-id pub-id-type="doi">10.1109/TSP.2017.2711517</pub-id></citation>
</ref>
<ref id="B36">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lachaux</surname> <given-names>J. P.</given-names></name> <name><surname>Rodriguez</surname> <given-names>E.</given-names></name> <name><surname>Martinerie</surname> <given-names>J.</given-names></name> <name><surname>Varela</surname> <given-names>F. J.</given-names></name></person-group> (<year>1999</year>). <article-title>Measuring phase synchrony in brain signals</article-title>. <source>Hum. Brain Mapp</source>. <volume>8</volume>, <fpage>194</fpage>&#x02013;<lpage>208</lpage>.<pub-id pub-id-type="pmid">10619414</pub-id></citation></ref>
<ref id="B37">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>Y.-E.</given-names></name> <name><surname>Kwak</surname> <given-names>N.-S.</given-names></name> <name><surname>Lee</surname> <given-names>S.-W.</given-names></name></person-group> (<year>2020</year>). <article-title>A real-time movement artifact removal method for ambulatory brain-computer interfaces</article-title>. <source>IEEE Transac. Neural Syst. Rehabil</source>. <volume>28</volume>, <fpage>2660</fpage>&#x02013;<lpage>2670</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2020.3040264</pub-id><pub-id pub-id-type="pmid">33232242</pub-id></citation></ref>
<ref id="B38">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>N.</given-names></name> <name><surname>Mok</surname> <given-names>C.</given-names></name> <name><surname>Witt</surname> <given-names>E. E.</given-names></name> <name><surname>Pradhan</surname> <given-names>A. H.</given-names></name> <name><surname>Chen</surname> <given-names>J. E.</given-names></name> <name><surname>Reiss</surname> <given-names>A. L.</given-names></name></person-group> (<year>2016</year>). <article-title>NIRS-based hyperscanning reveals inter-brain neural synchronization during cooperative jenga game with face-to-face communication</article-title>. <source>Front. Hum. Neurosci</source>. <volume>10</volume>:<fpage>82</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2016.00082</pub-id><pub-id pub-id-type="pmid">27014019</pub-id></citation></ref>
<ref id="B39">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mayo</surname> <given-names>O.</given-names></name> <name><surname>Gordon</surname> <given-names>I.</given-names></name></person-group> (<year>2020</year>). <article-title>In and out of synchrony-behavioral and physiological dynamics of dyadic interpersonal coordination</article-title>. <source>Psychophysiology</source> <volume>57</volume>:<fpage>e13574</fpage>. <pub-id pub-id-type="doi">10.1111/psyp.13574</pub-id><pub-id pub-id-type="pmid">32221984</pub-id></citation></ref>
<ref id="B40">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mehrkanoon</surname> <given-names>S.</given-names></name> <name><surname>Breakspear</surname> <given-names>M.</given-names></name> <name><surname>Britz</surname> <given-names>J.</given-names></name> <name><surname>Boonstra</surname> <given-names>T. W.</given-names></name></person-group> (<year>2014</year>). <article-title>Intrinsic coupling modes in source-reconstructed electroencephalography</article-title>. <source>Brain Connect</source>. <volume>4</volume>, <fpage>812</fpage>&#x02013;<lpage>825</lpage>. <pub-id pub-id-type="doi">10.1089/brain.2014.0280</pub-id><pub-id pub-id-type="pmid">25230358</pub-id></citation></ref>
<ref id="B41">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Miller</surname> <given-names>J. G.</given-names></name> <name><surname>Vrti&#x0010D;ka</surname> <given-names>P.</given-names></name> <name><surname>Cui</surname> <given-names>X.</given-names></name> <name><surname>Shrestha</surname> <given-names>S.</given-names></name> <name><surname>Hosseini</surname> <given-names>S. M. H.</given-names></name> <name><surname>Baker</surname> <given-names>J. M.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Inter-brain synchrony in mother-child dyads during cooperation: an fNIRS hyperscanning study</article-title>. <source>Neuropsychologia</source> <volume>124</volume>, <fpage>117</fpage>&#x02013;<lpage>124</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2018.12.021</pub-id><pub-id pub-id-type="pmid">30594570</pub-id></citation></ref>
<ref id="B42">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Misaki</surname> <given-names>M.</given-names></name> <name><surname>Kerr</surname> <given-names>K. L.</given-names></name> <name><surname>Ratliff</surname> <given-names>E. L.</given-names></name> <name><surname>Cosgrove</surname> <given-names>K. T.</given-names></name> <name><surname>Simmons</surname> <given-names>W. K.</given-names></name> <name><surname>Morris</surname> <given-names>A. S.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Beyond synchrony: the capacity of fMRI hyperscanning for the study of human social interaction</article-title>. <source>Soc. Cogn. Affect. Neurosci</source>. <volume>16</volume>, <fpage>84</fpage>&#x02013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsaa143</pub-id><pub-id pub-id-type="pmid">33104783</pub-id></citation></ref>
<ref id="B43">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Moreau</surname> <given-names>Q.</given-names></name> <name><surname>Dumas</surname> <given-names>G.</given-names></name></person-group> (<year>2021</year>). <article-title>Beyond correlation versus causation: multi-brain neuroscience needs explanation</article-title>. <source>Trends Cogn. Sci</source>. <volume>25</volume>, <fpage>542</fpage>&#x02013;<lpage>543</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2021.02.011</pub-id><pub-id pub-id-type="pmid">33752959</pub-id></citation></ref>
<ref id="B44">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mullen</surname> <given-names>T. R.</given-names></name> <name><surname>Kothe</surname> <given-names>C. A. E.</given-names></name> <name><surname>Chi</surname> <given-names>Y. M.</given-names></name> <name><surname>Ojeda</surname> <given-names>A.</given-names></name> <name><surname>Kerth</surname> <given-names>T.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2015</year>). <article-title>Real-time neuroimaging and cognitive monitoring using wearable dry EEG</article-title>. <source>IEEE Trans. Biomed. Eng</source>. <volume>62</volume>, <fpage>2553</fpage>&#x02013;<lpage>2567</lpage>. <pub-id pub-id-type="doi">10.1109/TBME.2015.2481482</pub-id><pub-id pub-id-type="pmid">26415149</pub-id></citation></ref>
<ref id="B45">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>M&#x000FC;ller</surname> <given-names>V.</given-names></name> <name><surname>Lindenberger</surname> <given-names>U.</given-names></name></person-group> (<year>2011</year>). <article-title>Cardiac and respiratory patterns synchronize between persons during choir singing</article-title>. <source>PLoS ONE</source> <volume>6</volume>:<fpage>e24893</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0024893</pub-id><pub-id pub-id-type="pmid">21957466</pub-id></citation></ref>
<ref id="B46">
<citation citation-type="web"><person-group person-group-type="author"><name><surname>Nijholt</surname> <given-names>A.</given-names></name></person-group> (<year>2019</year>). <source>Brain Art: Brain-Computer Interfaces for Artistic Expression. Springer</source>. Available online at: <ext-link ext-link-type="uri" xlink:href="https://play.google.com/store/books/details?id=qC6aDwAAQBAJ">https://play.google.com/store/books/details?id=qC6aDwAAQBAJ</ext-link> (accessed March 29, 2021).</citation>
</ref>
<ref id="B47">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nolte</surname> <given-names>G.</given-names></name> <name><surname>Bai</surname> <given-names>O.</given-names></name> <name><surname>Wheaton</surname> <given-names>L.</given-names></name> <name><surname>Mari</surname> <given-names>Z.</given-names></name> <name><surname>Vorbach</surname> <given-names>S.</given-names></name> <name><surname>Hallett</surname> <given-names>M.</given-names></name></person-group> (<year>2004</year>). <article-title>Identifying true brain interaction from EEG data using the imaginary part of coherency</article-title>. <source>Clin. Neurophysiol</source>. <volume>115</volume>, <fpage>2292</fpage>&#x02013;<lpage>2307</lpage>. <pub-id pub-id-type="doi">10.1016/j.clinph.2004.04.029</pub-id><pub-id pub-id-type="pmid">15351371</pub-id></citation></ref>
<ref id="B48">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Novembre</surname> <given-names>G.</given-names></name> <name><surname>Iannetti</surname> <given-names>G. D.</given-names></name></person-group> (<year>2021</year>). <article-title>Hyperscanning alone cannot prove causality. Multibrain stimulation can</article-title>. <source>Trends Cogn. Sci</source>. <volume>25</volume>, <fpage>96</fpage>&#x02013;<lpage>99</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2020.11.003</pub-id><pub-id pub-id-type="pmid">33293210</pub-id></citation></ref>
<ref id="B49">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Noy</surname> <given-names>L.</given-names></name> <name><surname>Levit-Binnun</surname> <given-names>N.</given-names></name> <name><surname>Golland</surname> <given-names>Y.</given-names></name></person-group> (<year>2015</year>). <article-title>Being in the zone: physiological markers of togetherness in joint improvisation</article-title>. <source>Front. Hum. Neurosci</source>. <volume>9</volume>:<fpage>187</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2015.00187</pub-id><pub-id pub-id-type="pmid">25999832</pub-id></citation></ref>
<ref id="B50">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nozawa</surname> <given-names>T.</given-names></name> <name><surname>Sasaki</surname> <given-names>Y.</given-names></name> <name><surname>Sakaki</surname> <given-names>K.</given-names></name> <name><surname>Yokoyama</surname> <given-names>R.</given-names></name> <name><surname>Kawashima</surname> <given-names>R.</given-names></name></person-group> (<year>2016</year>). <article-title>Interpersonal frontopolar neural synchronization in group communication: an exploration toward fNIRS hyperscanning of natural interactions</article-title>. <source>Neuroimage</source> <volume>133</volume>, <fpage>484</fpage>&#x02013;<lpage>497</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2016.03.059</pub-id><pub-id pub-id-type="pmid">27039144</pub-id></citation></ref>
<ref id="B51">
<citation citation-type="web"><person-group person-group-type="author"><name><surname>Oppenheim</surname> <given-names>A. V.</given-names></name></person-group> (<year>1999</year>). <source>Discrete-Time Signal Processing. Pearson Education India</source>. Available online at: <ext-link ext-link-type="uri" xlink:href="http://182.160.97.198:8080/xmlui/bitstream/handle/123456789/489/13.%20Cepstrum%20Analysis%20and%20Homomorphic%20Deconvolution.pdf?sequence=14">http://182.160.97.198:8080/xmlui/bitstream/handle/123456789/489/13.%20Cepstrum%20Analysis%20and%20Homomorphic%20Deconvolution.pdf?sequence=14</ext-link></citation>
</ref>
<ref id="B52">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oullier</surname> <given-names>O.</given-names></name> <name><surname>de Guzman</surname> <given-names>G. C.</given-names></name> <name><surname>Jantzen</surname> <given-names>K. J.</given-names></name> <name><surname>Lagarde</surname> <given-names>J.</given-names></name> <name><surname>Kelso</surname> <given-names>J. A. S.</given-names></name></person-group> (<year>2008</year>). <article-title>Social coordination dynamics: measuring human bonding</article-title>. <source>Soc. Neurosci</source>. <volume>3</volume>, <fpage>178</fpage>&#x02013;<lpage>192</lpage>. <pub-id pub-id-type="doi">10.1080/17470910701563392</pub-id><pub-id pub-id-type="pmid">18552971</pub-id></citation></ref>
<ref id="B53">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pan</surname> <given-names>Y.</given-names></name> <name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Goldstein</surname> <given-names>P.</given-names></name> <name><surname>Zhu</surname> <given-names>Y.</given-names></name> <name><surname>Yang</surname> <given-names>C.</given-names></name> <name><surname>Hu</surname> <given-names>Y.</given-names></name></person-group> (<year>2020</year>). <article-title>Instructor-learner brain coupling discriminates between instructional approaches and predicts learning</article-title>. <source>Neuroimage</source> <volume>211</volume>:<fpage>116657</fpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.116657</pub-id><pub-id pub-id-type="pmid">32068165</pub-id></citation></ref>
<ref id="B54">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Perez Repetto</surname> <given-names>L.</given-names></name> <name><surname>Jasmin</surname> <given-names>E.</given-names></name> <name><surname>Fombonne</surname> <given-names>E.</given-names></name> <name><surname>Gisel</surname> <given-names>E.</given-names></name> <name><surname>Couture</surname> <given-names>M.</given-names></name></person-group> (<year>2017</year>). <article-title>Longitudinal study of sensory features in children with autism spectrum disorder</article-title>. <source>Autism Res. Treat</source>. <volume>2017</volume>:<fpage>1934701</fpage>. <pub-id pub-id-type="doi">10.1155/2017/1934701</pub-id><pub-id pub-id-type="pmid">30060977</pub-id></citation></ref>
<ref id="B55">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>P&#x000E9;rez</surname> <given-names>A.</given-names></name> <name><surname>Dumas</surname> <given-names>G.</given-names></name> <name><surname>Karadag</surname> <given-names>M.</given-names></name> <name><surname>Du&#x000F1;abeitia</surname> <given-names>J. A.</given-names></name></person-group> (<year>2019</year>). <article-title>Differential brain-to-brain entrainment while speaking and listening in native and foreign languages</article-title>. <source>Cortex</source> <volume>111</volume>, <fpage>303</fpage>&#x02013;<lpage>315</lpage>. <pub-id pub-id-type="doi">10.1016/j.cortex.2018.11.026</pub-id><pub-id pub-id-type="pmid">30598230</pub-id></citation></ref>
<ref id="B56">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pickering</surname> <given-names>M. J.</given-names></name> <name><surname>Garrod</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>The interactive-alignment model: developments and refinements</article-title>. <source>Behav. Brain Sci</source>. <volume>27</volume>, <fpage>212</fpage>&#x02013;<lpage>225</lpage>. <pub-id pub-id-type="doi">10.1017/S0140525X04450055</pub-id><pub-id pub-id-type="pmid">18241490</pub-id></citation></ref>
<ref id="B57">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pickering</surname> <given-names>M. J.</given-names></name> <name><surname>Garrod</surname> <given-names>S.</given-names></name></person-group> (<year>2013</year>). <article-title>Authors&#x00027; response: forward models and their implications for production, comprehension, and dialogue [Review of Authors&#x00027; response: forward models and their implications for production, comprehension, and dialogue]</article-title>. <source>Behav. Brain Sci</source>. <volume>36</volume>, <fpage>377</fpage>&#x02013;<lpage>392</lpage>. <pub-id pub-id-type="doi">10.1017/S0140525X12003238</pub-id><pub-id pub-id-type="pmid">24049786</pub-id></citation></ref>
<ref id="B58">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pietromonaco</surname> <given-names>P. R.</given-names></name> <name><surname>Collins</surname> <given-names>N. L.</given-names></name></person-group> (<year>2017</year>). <article-title>Interpersonal mechanisms linking close relationships to health</article-title>. <source>Am. Psychol</source>. <volume>72</volume>, <fpage>531</fpage>&#x02013;<lpage>542</lpage>. <pub-id pub-id-type="doi">10.1037/amp0000129</pub-id><pub-id pub-id-type="pmid">28880100</pub-id></citation></ref>
<ref id="B59">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Reindl</surname> <given-names>V.</given-names></name> <name><surname>Gerloff</surname> <given-names>C.</given-names></name> <name><surname>Scharke</surname> <given-names>W.</given-names></name> <name><surname>Konrad</surname> <given-names>K.</given-names></name></person-group> (<year>2018</year>). <article-title>Brain-to-brain synchrony in parent-child dyads and the relationship with emotion regulation revealed by fNIRS-based hyperscanning</article-title>. <source>Neuroimage</source> <volume>178</volume>, <fpage>493</fpage>&#x02013;<lpage>502</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2018.05.060</pub-id><pub-id pub-id-type="pmid">29807152</pub-id></citation></ref>
<ref id="B60">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Reinero</surname> <given-names>D. A.</given-names></name> <name><surname>Dikker</surname> <given-names>S.</given-names></name> <name><surname>Van Bavel</surname> <given-names>J. J.</given-names></name></person-group> (<year>2021</year>). <article-title>Inter-brain synchrony in teams predicts collective performance</article-title>. <source>Soc. Cogn. Affect. Neurosci</source>. <volume>16</volume>, <fpage>43</fpage>&#x02013;<lpage>57</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsaa135</pub-id><pub-id pub-id-type="pmid">32991728</pub-id></citation></ref>
<ref id="B61">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Salminen</surname> <given-names>M.</given-names></name> <name><surname>J&#x000E1;rvel&#x000E1;</surname> <given-names>S.</given-names></name> <name><surname>Ruonala</surname> <given-names>A.</given-names></name> <name><surname>Harjunen</surname> <given-names>V.</given-names></name> <name><surname>Jacucci</surname> <given-names>G.</given-names></name> <name><surname>Hamari</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Evoking physiological synchrony and empathy using social VR with biofeedback</article-title>. <source>IEEE Transac. Affect. Comput</source>. 1. <pub-id pub-id-type="doi">10.1109/TAFFC.2019.2958657</pub-id></citation>
</ref>
<ref id="B62">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scholkmann</surname> <given-names>F.</given-names></name> <name><surname>Holper</surname> <given-names>L.</given-names></name> <name><surname>Wolf</surname> <given-names>U.</given-names></name> <name><surname>Wolf</surname> <given-names>M.</given-names></name></person-group> (<year>2013</year>). <article-title>A new methodical approach in neuroscience: assessing inter-personal brain coupling using functional near-infrared imaging (fNIRI) hyperscanning</article-title>. <source>Front. Hum. Neurosci</source>. <volume>7</volume>:<fpage>813</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2013.00813</pub-id><pub-id pub-id-type="pmid">24348362</pub-id></citation></ref>
<ref id="B63">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shaw</surname> <given-names>J. C.</given-names></name></person-group> (<year>1984</year>). <article-title>Correlation and coherence analysis of the EEG: a selective tutorial review</article-title>. <source>Int. J. Psychophysiol</source>. <volume>1</volume>, <fpage>255</fpage>&#x02013;<lpage>266</lpage>. <pub-id pub-id-type="doi">10.1016/0167-8760(84)90045-X</pub-id><pub-id pub-id-type="pmid">6542910</pub-id></citation></ref>
<ref id="B64">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stephens</surname> <given-names>G. J.</given-names></name> <name><surname>Silbert</surname> <given-names>L. J.</given-names></name> <name><surname>Hasson</surname> <given-names>U.</given-names></name></person-group> (<year>2010a</year>). <article-title>Speaker&#x02013;listener neural coupling underlies successful communication</article-title>. <source>Proc. Natl. Acad. Sci</source>. <volume>107</volume>, <fpage>14425</fpage>&#x02013;<lpage>14430</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1008662107</pub-id><pub-id pub-id-type="pmid">20660768</pub-id></citation></ref>
<ref id="B65">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stephens</surname> <given-names>G. J.</given-names></name> <name><surname>Silbert</surname> <given-names>L. J.</given-names></name> <name><surname>Hasson</surname> <given-names>U.</given-names></name></person-group> (<year>2010b</year>). <article-title>Speaker&#x02013;listener neural coupling underlies successful communication</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>107</volume>, <fpage>14425</fpage>&#x02013;<lpage>14430</lpage>.<pub-id pub-id-type="pmid">20660768</pub-id></citation></ref>
<ref id="B66">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Mai</surname> <given-names>X.</given-names></name> <name><surname>Wang</surname> <given-names>S.</given-names></name> <name><surname>Zhu</surname> <given-names>C.</given-names></name> <name><surname>Krueger</surname> <given-names>F.</given-names></name> <name><surname>Liu</surname> <given-names>C.</given-names></name></person-group> (<year>2016</year>). <article-title>Interpersonal brain synchronization in the right temporo-parietal junction during face-to-face economic exchange</article-title>. <source>Soc. Cogn. Affect. Neurosci</source>. <volume>11</volume>, <fpage>23</fpage>&#x02013;<lpage>32</lpage>. <pub-id pub-id-type="doi">10.1093/scan/nsv092</pub-id><pub-id pub-id-type="pmid">26211014</pub-id></citation></ref>
<ref id="B67">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tognoli</surname> <given-names>E.</given-names></name> <name><surname>Lagarde</surname> <given-names>J.</given-names></name> <name><surname>DeGuzman</surname> <given-names>G. C.</given-names></name> <name><surname>Kelso</surname> <given-names>J. A. S.</given-names></name></person-group> (<year>2007</year>). <article-title>The phi complex as a neuromarker of human social coordination</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>104</volume>, <fpage>8190</fpage>&#x02013;<lpage>8195</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0611453104</pub-id><pub-id pub-id-type="pmid">17470821</pub-id></citation></ref>
<ref id="B68">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Berkum</surname> <given-names>J. J. A.</given-names></name> <name><surname>Holleman</surname> <given-names>B.</given-names></name> <name><surname>Nieuwland</surname> <given-names>M.</given-names></name> <name><surname>Otten</surname> <given-names>M.</given-names></name> <name><surname>Murre</surname> <given-names>J.</given-names></name></person-group> (<year>2009</year>). <article-title>Right or wrong? The brain&#x00027;s fast response to morally objectionable statements</article-title>. <source>Psychol. Sci</source>. <volume>20</volume>, <fpage>1092</fpage>&#x02013;<lpage>1099</lpage>. <pub-id pub-id-type="doi">10.1111/j.1467-9280.2009.02411.x</pub-id><pub-id pub-id-type="pmid">19656340</pub-id></citation></ref>
<ref id="B69">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>van Hoogdalem</surname> <given-names>L. E.</given-names></name> <name><surname>Feijs</surname> <given-names>H. M. E.</given-names></name> <name><surname>Bramer</surname> <given-names>W. M.</given-names></name> <name><surname>Ismail</surname> <given-names>S. Y.</given-names></name> <name><surname>van Dongen</surname> <given-names>J. D. M.</given-names></name></person-group> (<year>2020</year>). <article-title>The effectiveness of neurofeedback therapy as an alternative treatment for autism spectrum disorders in children: a systematic review</article-title>. <source>J. Psychophysiol</source>. <volume>35</volume>, <fpage>102</fpage>&#x02013;<lpage>115</lpage>. <pub-id pub-id-type="doi">10.1027/0269-8803/a000265</pub-id></citation>
</ref>
<ref id="B70">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>van Ulzen</surname> <given-names>N. R.</given-names></name> <name><surname>Lamoth</surname> <given-names>C. J. C.</given-names></name> <name><surname>Daffertshofer</surname> <given-names>A.</given-names></name> <name><surname>Semin</surname> <given-names>G. R.</given-names></name> <name><surname>Beek</surname> <given-names>P. J.</given-names></name></person-group> (<year>2008</year>). <article-title>Characteristics of instructed and uninstructed interpersonal coordination while walking side-by-side</article-title>. <source>Neurosci. Lett</source>. <volume>432</volume>, <fpage>88</fpage>&#x02013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1016/j.neulet.2007.11.070</pub-id><pub-id pub-id-type="pmid">18242846</pub-id></citation></ref>
<ref id="B71">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Varlet</surname> <given-names>M.</given-names></name> <name><surname>Marin</surname> <given-names>L.</given-names></name> <name><surname>Lagarde</surname> <given-names>J.</given-names></name> <name><surname>Bardy</surname> <given-names>B. G.</given-names></name></person-group> (<year>2011</year>). <article-title>Social postural coordination</article-title>. <source>J. Exp. Psychol. Hum. Percept. Perform</source>. <volume>37</volume>, <fpage>473</fpage>&#x02013;<lpage>483</lpage>. <pub-id pub-id-type="doi">10.1037/a0020552</pub-id><pub-id pub-id-type="pmid">20854001</pub-id></citation></ref>
<ref id="B72">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Williams</surname> <given-names>N. S.</given-names></name> <name><surname>McArthur</surname> <given-names>G. M.</given-names></name> <name><surname>Badcock</surname> <given-names>N. A.</given-names></name></person-group> (<year>2020</year>). <article-title>10 years of EPOC: a scoping review of Emotiv&#x00027;s portable EEG device</article-title>. <source>bioRxiv [Preprint]</source>. <pub-id pub-id-type="doi">10.1101/2020.07.14.202085</pub-id></citation>
</ref>
<ref id="B73">
<citation citation-type="web"><person-group person-group-type="author"><name><surname>Wright</surname> <given-names>M.</given-names></name> <name><surname>Momeni</surname> <given-names>A.</given-names></name></person-group> (<year>2011</year>). <source>Open Sound Control: State of the Art 2003</source>. Available online at: <ext-link ext-link-type="uri" xlink:href="http://opensoundcontrol.org/files/Open&#x0002B;Sound&#x0002B;Control-State&#x0002B;Of&#x0002B;The&#x0002B;Art.Pdf">http://opensoundcontrol.org/files/Open&#x0002B;Sound&#x0002B;Control-State&#x0002B;Of&#x0002B;The&#x0002B;Art.Pdf</ext-link></citation>
</ref>
<ref id="B74">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yun</surname> <given-names>K.</given-names></name></person-group> (<year>2013</year>). <article-title>On the same wavelength: face-to-face communication increases interpersonal neural synchronization</article-title>. <source>J. Neurosci</source>. <volume>33</volume>, <fpage>5081</fpage>&#x02013;<lpage>5082</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.0063-13.2013</pub-id><pub-id pub-id-type="pmid">23516273</pub-id></citation></ref>
<ref id="B75">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zamm</surname> <given-names>A.</given-names></name> <name><surname>Debener</surname> <given-names>S.</given-names></name> <name><surname>Bauer</surname> <given-names>A.-K. R.</given-names></name> <name><surname>Bleichner</surname> <given-names>M. G.</given-names></name> <name><surname>Demos</surname> <given-names>A. P.</given-names></name> <name><surname>Palmer</surname> <given-names>C.</given-names></name></person-group> (<year>2018a</year>). <article-title>Amplitude envelope correlations measure synchronous cortical oscillations in performing musicians</article-title>. <source>Ann. N. Y. Acad. Sci</source>. <volume>1423</volume>, <fpage>251</fpage>&#x02013;<lpage>263</lpage>. <pub-id pub-id-type="doi">10.1111/nyas.13738</pub-id><pub-id pub-id-type="pmid">29756657</pub-id></citation></ref>
<ref id="B76">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zamm</surname> <given-names>A.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Palmer</surname> <given-names>C.</given-names></name></person-group> (<year>2018b</year>). <article-title>Musicians&#x00027; natural frequencies of performance display optimal temporal stability</article-title>. <source>J. Biol. Rhythms</source>. <volume>33</volume>, <fpage>432</fpage>&#x02013;<lpage>440</lpage>. <pub-id pub-id-type="doi">10.1177/0748730418783651</pub-id><pub-id pub-id-type="pmid">29940801</pub-id></citation></ref>
</ref-list>
<fn-group>
<fn fn-type="financial-disclosure"><p><bold>Funding.</bold> This work was supported by Stichting Niemeijer Fonds, The Netherlands Organization for Scientific Research grant &#x00023;275-89-018 and &#x00023;406.18.GO.024, Creative Industries Fund NL, TodaysArt, Marina Abramovic Institute, and Fundaci&#x000F3;n Telef&#x000F3;nica.</p>
</fn>
</fn-group>
</back>
</article>