<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2018.02682</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>How Live Music Moves Us: Head Movement Differences in Audiences to Live Versus Recorded Music</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Swarbrick</surname> <given-names>Dana</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/603918/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Bosnyak</surname> <given-names>Dan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/50293/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Livingstone</surname> <given-names>Steven R.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn002"><sup>&#x2020;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/91364/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Bansal</surname> <given-names>Jotthi</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/429669/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Marsh-Rollo</surname> <given-names>Susan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/603559/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Woolhouse</surname> <given-names>Matthew H.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/130453/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Trainor</surname> <given-names>Laurel J.</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/3358/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of Psychology, Neuroscience &#x0026; Behaviour, McMaster University</institution>, <addr-line>Hamilton, ON</addr-line>, <country>Canada</country></aff>
<aff id="aff2"><sup>2</sup><institution>McMaster Institute for Music and the Mind, McMaster University</institution>, <addr-line>Hamilton, ON</addr-line>, <country>Canada</country></aff>
<aff id="aff3"><sup>3</sup><institution>Digital Music Lab, School of the Arts, McMaster University</institution>, <addr-line>Hamilton, ON</addr-line>, <country>Canada</country></aff>
<aff id="aff4"><sup>4</sup><institution>Rotman Research Institute, Baycrest Hospital</institution>, <addr-line>Toronto, ON</addr-line>, <country>Canada</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Jonathan B. Fritz, University of Maryland, College Park, United States</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Enrico Glerean, Aalto University, Finland; Jessica Phillips-Silver, Georgetown University Medical Center, United States</p></fn>
<corresp id="c001">&#x002A;Correspondence: Laurel J. Trainor, <email>ljt@mcmaster.ca</email></corresp>
<fn fn-type="other" id="fn002"><p><sup>&#x2020;</sup>Present address: Steven R. Livingstone, Department of Computer Science, University of Otago, Dunedin, New Zealand</p></fn>
<fn fn-type="other" id="fn003"><p>This article was submitted to Auditory Cognitive Neuroscience, a section of the journal Frontiers in Psychology</p></fn></author-notes>
<pub-date pub-type="epub">
<day>11</day>
<month>01</month>
<year>2019</year>
</pub-date>
<pub-date pub-type="collection">
<year>2018</year>
</pub-date>
<volume>9</volume>
<elocation-id>2682</elocation-id>
<history>
<date date-type="received">
<day>21</day>
<month>08</month>
<year>2018</year>
</date>
<date date-type="accepted">
<day>13</day>
<month>12</month>
<year>2018</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2019 Swarbrick, Bosnyak, Livingstone, Bansal, Marsh-Rollo, Woolhouse and Trainor.</copyright-statement>
<copyright-year>2019</copyright-year>
<copyright-holder>Swarbrick, Bosnyak, Livingstone, Bansal, Marsh-Rollo, Woolhouse and Trainor</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>A live music concert is a pleasurable social event that is among the most visceral and memorable forms of musical engagement. But what inspires listeners to attend concerts, sometimes at great expense, when they could listen to recordings at home? An iconic aspect of popular concerts is engaging with other audience members through moving to the music. Head movements, in particular, reflect emotion and have social consequences when experienced with others. Previous studies have explored the affiliative social engagement experienced among people moving together to music. But live concerts have other features that might also be important, such as that during a live performance the music unfolds in a unique and not predetermined way, potentially increasing anticipation and feelings of involvement for the audience. Being in the same space as the musicians might also be exciting. Here we controlled for simply being in an audience to examine whether factors inherent to live performance contribute to the concert experience. We used motion capture to compare head movement responses at a live album release concert featuring Canadian rock star Ian Fletcher Thornley, and at a concert without the performers where the same songs were played from the recorded album. We also examined effects of a prior connection with the performers by comparing fans and neutral-listeners, while controlling for familiarity with the songs, as the album had not yet been released. Head movements were faster during the live concert than the album-playback concert. Self-reported fans moved faster and exhibited greater levels of rhythmic entrainment than neutral-listeners. These results indicate that live music engages listeners to a greater extent than pre-recorded music and that a pre-existing admiration for the performers also leads to higher engagement.</p>
</abstract>
<kwd-group>
<kwd>live concert</kwd>
<kwd>recorded music</kwd>
<kwd>music</kwd>
<kwd>fan</kwd>
<kwd>entrainment</kwd>
<kwd>movement</kwd>
<kwd>motion capture</kwd>
</kwd-group>
<contract-sponsor id="cn001">Social Sciences and Humanities Research Council of Canada<named-content content-type="fundref-id">10.13039/501100000155</named-content></contract-sponsor>
<contract-sponsor id="cn002">Canada Foundation for Innovation<named-content content-type="fundref-id">10.13039/501100000196</named-content></contract-sponsor>
<counts>
<fig-count count="2"/>
<table-count count="1"/>
<equation-count count="0"/>
<ref-count count="99"/>
<page-count count="11"/>
<word-count count="0"/>
</counts>
</article-meta>
</front>
<body>
<sec><title>Introduction</title>
<p>Music is a universal social phenomenon that has traditionally been experienced in a live context (<xref ref-type="bibr" rid="B62">Nettl and Russell, 1998</xref>; <xref ref-type="bibr" rid="B33">Freeman, 2000</xref>). The advent of recording technology in the late 19th century heralded a cultural shift in the way that people experienced music, allowing for the convenience of private, in-home consumption (<xref ref-type="bibr" rid="B59">Moreau, 2013</xref>). While technology has provided a low-cost, convenient method for music listening, many people continue to attend live concerts, sometimes at great expense in uncomfortable settings (<xref ref-type="bibr" rid="B3">Baxter-Moore and Kitts, 2016</xref>; <xref ref-type="bibr" rid="B6">Brown and Knox, 2017</xref>). What is it about the experience that motivates listeners to attend live concerts? A survey found that listeners&#x2019; strongest musical experiences often took place at live events (<xref ref-type="bibr" rid="B46">Lamont, 2011</xref>). Two factors that likely contribute critically to the enjoyment of live concerts are (1) people like the social connection of experiencing music with other people (<xref ref-type="bibr" rid="B10">Burland and Pitts, 2014</xref>; <xref ref-type="bibr" rid="B6">Brown and Knox, 2017</xref>) and (2) people like the feeling of being connected to the performers, by being in the same physical space together, with the potential for performers to directly engage the audience (<xref ref-type="bibr" rid="B79">Silverberg et al., 2013</xref>; <xref ref-type="bibr" rid="B48">Leante, 2016</xref>), and by experiencing a unique live performance as it unfolds over time (<xref ref-type="bibr" rid="B6">Brown and Knox, 2017</xref>). Every live performance is idiosyncratic such that events unfold organically and unpredictably, unlike when listening to a recording in which there is no possibility for an audience to directly affect what a performer has already created.</p>
<p>The social effects of experiencing music with other people have been studied to a greater extent than the effects of experiencing a live performance (<xref ref-type="bibr" rid="B33">Freeman, 2000</xref>; <xref ref-type="bibr" rid="B28">Egermann et al., 2011</xref>; <xref ref-type="bibr" rid="B69">Rennung and Goritz, 2016</xref>; <xref ref-type="bibr" rid="B83">Stupacher et al., 2017</xref>). Here we examined the effects of live performance while controlling for the social setting. We compared people who listened to a live performance (specifically, a record release party by Canadian rock star Ian Fletcher Thornley&#x2019;s 2015 solo album <italic>Secrets</italic>) to people who listened in a group in the same venue without live performers to the album recordings of the same songs from <italic>Secrets</italic>. Recently, research on audiences of live performances has gained interest (<xref ref-type="bibr" rid="B28">Egermann et al., 2011</xref>; <xref ref-type="bibr" rid="B10">Burland and Pitts, 2014</xref>; <xref ref-type="bibr" rid="B23">Danielsen and Helseth, 2016</xref>; <xref ref-type="bibr" rid="B5">Bradby, 2017</xref>; <xref ref-type="bibr" rid="B6">Brown and Knox, 2017</xref>), in part because audiences provide an ecologically valid setting for examining group dynamics. 
Audience experience has been examined with a variety of techniques including real-time subjective responses (<xref ref-type="bibr" rid="B56">McAdams, 2004</xref>; <xref ref-type="bibr" rid="B81">Stevens et al., 2009</xref>, <xref ref-type="bibr" rid="B80">2014</xref>; <xref ref-type="bibr" rid="B27">Egermann et al., 2013</xref>), social networking (<xref ref-type="bibr" rid="B25">Deller, 2011</xref>), video analysis (<xref ref-type="bibr" rid="B12">Chan et al., 2013</xref>; <xref ref-type="bibr" rid="B79">Silverberg et al., 2013</xref>; <xref ref-type="bibr" rid="B80">Stevens et al., 2014</xref>; <xref ref-type="bibr" rid="B48">Leante, 2016</xref>; <xref ref-type="bibr" rid="B84">Theodorou et al., 2016</xref>) and physiological measurement (<xref ref-type="bibr" rid="B31">Fancourt and Williamon, 2016</xref>; <xref ref-type="bibr" rid="B4">Bernardi et al., 2017</xref>). It is important to understand effects of the concert setting because attendance may improve health: attending a musical performance was found to reduce stress hormones in audience members (<xref ref-type="bibr" rid="B31">Fancourt and Williamon, 2016</xref>) and a 10-year longitudinal study suggested that engagement in cultural events, including concerts, may protect against age-related cognitive decline (<xref ref-type="bibr" rid="B30">Fancourt and Steptoe, 2018</xref>).</p>
<p>Enjoying music with other listeners may contribute powerfully to the concert experience. Observers of concert audiences judged synchronously moving listeners as experiencing greater rapport and similar psychological states compared to those moving asynchronously (<xref ref-type="bibr" rid="B45">Lakens and Stel, 2011</xref>). After adults move in synchrony, even when unaware of their synchronised movements, they remember more about each other, express liking each other more, and show greater levels of trust and cooperation compared to after moving asynchronously (<xref ref-type="bibr" rid="B40">Hove and Risen, 2009</xref>; <xref ref-type="bibr" rid="B92">Wiltermuth and Heath, 2009</xref>; <xref ref-type="bibr" rid="B89">Valdesolo et al., 2010</xref>; <xref ref-type="bibr" rid="B88">Valdesolo and DeSteno, 2011</xref>; <xref ref-type="bibr" rid="B47">Launay et al., 2013</xref>; <xref ref-type="bibr" rid="B95">Woolhouse et al., 2016</xref>). More broadly, periodic movements and physiological rhythms, such as breathing and heart rate, tend to synchronise unconsciously among people in a group (<xref ref-type="bibr" rid="B70">Richardson et al., 2007</xref>; <xref ref-type="bibr" rid="B91">van Ulzen et al., 2008</xref>; <xref ref-type="bibr" rid="B60">Morris, 2010</xref>; <xref ref-type="bibr" rid="B21">Codrons et al., 2014</xref>; <xref ref-type="bibr" rid="B58">Miyata et al., 2018</xref>).</p>
<p>Entrainment is defined as the ability to synchronise movements with an external auditory stimulus, in this case the timing regularities of music (<xref ref-type="bibr" rid="B67">Phillips-Silver and Keller, 2012</xref>). In humans, synchronisation is supported by connections between auditory and motor cortices (<xref ref-type="bibr" rid="B71">Sakai et al., 1999</xref>; <xref ref-type="bibr" rid="B41">Janata and Grafton, 2003</xref>; <xref ref-type="bibr" rid="B37">Grahn and Brett, 2007</xref>; <xref ref-type="bibr" rid="B98">Zatorre et al., 2007</xref>; <xref ref-type="bibr" rid="B36">Fujioka et al., 2012</xref>) and manifests as oscillatory activity measured in EEG and MEG (<xref ref-type="bibr" rid="B74">Schroeder and Lakatos, 2009</xref>; <xref ref-type="bibr" rid="B1">Arnal and Giraud, 2012</xref>; <xref ref-type="bibr" rid="B36">Fujioka et al., 2012</xref>, <xref ref-type="bibr" rid="B35">2015</xref>; <xref ref-type="bibr" rid="B22">Cravo et al., 2013</xref>; <xref ref-type="bibr" rid="B11">Calderone et al., 2014</xref>; <xref ref-type="bibr" rid="B16">Cirelli et al., 2014a</xref>; <xref ref-type="bibr" rid="B13">Chang et al., 2018a</xref>). Interestingly, few non-human species entrain movements to auditory regularities (<xref ref-type="bibr" rid="B57">Merker et al., 2009</xref>; <xref ref-type="bibr" rid="B66">Patel et al., 2009</xref>; <xref ref-type="bibr" rid="B72">Schachner et al., 2009</xref>). The connection between movement synchronisation and social-emotional engagement may have deep evolutionary roots in humans. Infants are not yet able to coordinate their movements to entrain to a musical beat, although they do move faster to music with a faster compared to slower tempo (<xref ref-type="bibr" rid="B99">Zentner and Eerola, 2010</xref>). 
Yet if an infant as young as 14 months is bounced to music synchronously with the movements of another adult, the infant is more likely to help that adult (e.g., to pick up &#x201C;accidentally&#x201D; dropped objects needed to complete a task) compared to if an infant is bounced asynchronously with the adult (<xref ref-type="bibr" rid="B19">Cirelli et al., 2014c</xref>). Later work revealed that this increased helpfulness extends to friends of the experimenter who bounced with them (<xref ref-type="bibr" rid="B20">Cirelli et al., 2016</xref>). In another study, infants who were bounced to music with stuffed animals chose animals that bounced synchronously with them over animals that bounced asynchronously. These studies indicate that synchronisation of movement with others during music listening is a cue that even infants use in the development of social-emotional bonds and altruistic behaviours (<xref ref-type="bibr" rid="B86">Trainor and Cirelli, 2015</xref>; <xref ref-type="bibr" rid="B18">Cirelli et al., 2018</xref>).</p>
<p>We examined the effect of live music while controlling for the effects of being with others in an audience. Little research has examined differences between live and recorded performances by manipulating the presence and absence of the performer. <xref ref-type="bibr" rid="B78">Shoda et al. (2016)</xref> reported that the heartbeats of audience members at a live performance exhibited greater entrainment with the musical rhythm than those of listeners at a pre-recorded performance. Performer presence was also found to produce greater relaxation in audience members compared to those listening to a recording (<xref ref-type="bibr" rid="B78">Shoda et al., 2016</xref>). Contemporary popular performers often play variations of recorded works at live performances (<xref ref-type="bibr" rid="B76">Shoda and Adachi, 2015</xref>), suggesting a novelty factor for listeners. <xref ref-type="bibr" rid="B6">Brown and Knox (2017)</xref> found that audience members consider this musical novelty as an important motivator for concert attendance. Live concerts also enable audience members to experience an in-person relationship with the performer. Performers can also be influenced by the presence of an audience, and live performances can be acoustically and energetically different than those recorded in the studio (<xref ref-type="bibr" rid="B97">Zajonc, 1965</xref>; <xref ref-type="bibr" rid="B96">Yoshie et al., 2016</xref>; <xref ref-type="bibr" rid="B5">Bradby, 2017</xref>).</p>
<p>We used head movement responses as our main measure of audience experience for several reasons. Moving to the beat during music listening is culturally ubiquitous, with collective movement a hallmark of the contemporary concert experience (<xref ref-type="bibr" rid="B98">Zatorre et al., 2007</xref>; <xref ref-type="bibr" rid="B53">Madison et al., 2011</xref>; <xref ref-type="bibr" rid="B42">Janata et al., 2012</xref>; <xref ref-type="bibr" rid="B24">Davies et al., 2013</xref>; <xref ref-type="bibr" rid="B54">Madison and Sioros, 2014</xref>; <xref ref-type="bibr" rid="B83">Stupacher et al., 2017</xref>). Individuals use a range of movements when listening to music, from foot tapping to head nodding, to whole body movement (<xref ref-type="bibr" rid="B49">Leman and God&#x00F8;y, 2010</xref>). Head movements are particularly relevant as they are a reliable indicator of rhythmic entrainment (<xref ref-type="bibr" rid="B85">Toiviainen et al., 2010</xref>; <xref ref-type="bibr" rid="B7">Burger et al., 2013</xref>), reveal communication patterns between performers (<xref ref-type="bibr" rid="B15">Chang et al., 2017</xref>), reveal directional and emotional communication patterns (Chang et al., unpublished), and even predict who will &#x201C;match&#x201D; during speed dating (<xref ref-type="bibr" rid="B14">Chang et al., 2018b</xref>). Movement of the head alone&#x2014;but not legs alone&#x2014;affects how ambiguous auditorily-presented rhythms are interpreted (<xref ref-type="bibr" rid="B68">Phillips-Silver and Trainor, 2008</xref>). This interaction between head movement and auditory perception likely involves the vestibular system located in the inner ear which processes proprioceptive information about head movements (<xref ref-type="bibr" rid="B87">Trainor et al., 2009</xref>). 
Head movements also encode emotional information (<xref ref-type="bibr" rid="B52">Livingstone and Palmer, 2016</xref>; Chang et al., unpublished), and may function as a form of non-verbal communication in a noisy environment (<xref ref-type="bibr" rid="B39">Harrigan et al., 2008</xref>). Head movements provide information about the nature of an emotion being communicated (<xref ref-type="bibr" rid="B29">Ekman and Friesen, 1967</xref>; <xref ref-type="bibr" rid="B94">Witkower and Tracy, 2018</xref>). Furthermore, movement smoothness (which increases with movement speed) is greater when communicating joy than a neutral emotion or sadness (<xref ref-type="bibr" rid="B43">Kang and Gross, 2016</xref>). Horizontal head movements and forward velocity communicate happiness even without the context provided by facial expression or vocal content (<xref ref-type="bibr" rid="B52">Livingstone and Palmer, 2016</xref>). Additionally, movement vigour (average speed) and movement distance have been shown to convey the intensity of emotions (<xref ref-type="bibr" rid="B2">Atkinson et al., 2004</xref>). <xref ref-type="bibr" rid="B51">Leow et al. (2015)</xref> found that, even when asked to walk at the same tempo, participants walked more vigorously (faster) to more familiar music. One study found that during music listening, greater head speed was correlated with increased spectral flux in low frequencies (associated with greater presence of kick drum and bass guitar) and in high frequencies (associated with hi-hat and cymbals or liveliness of a rhythm), as well as with greater percussiveness, but head speed was not found to be related to tempo (<xref ref-type="bibr" rid="B7">Burger et al., 2013</xref>).</p>
<p>In summary, there are many possible factors contributing to movement during music listening including biological imperatives, emotions, and the presence of others. These factors have been studied in highly controlled laboratory settings but have yet to be explored in real-world music listening contexts. In the present study, we were interested in how a live concert affected audience head movements as an index of engagement, specifically, by comparing the movements of concertgoers who experienced a live performance versus a recorded version of the same songs. We were particularly interested in the measure of vigour. Following previous researchers, we operationally defined movement vigour as the average speed of movement over a time interval, regardless of direction (specifically, head distance travelled within a song divided by the total length of the song, giving a value in millimetres per second) (<xref ref-type="bibr" rid="B2">Atkinson et al., 2004</xref>; <xref ref-type="bibr" rid="B55">Mazzoni et al., 2007</xref>; <xref ref-type="bibr" rid="B99">Zentner and Eerola, 2010</xref>). We were also interested in how head movements might be influenced by audience members&#x2019; prior admiration for the performers (i.e., their Listener-preference). People are motivated to attend music concerts when they hold a strong preference for the musicians&#x2019; work. Musical preferences for genres and artists also play a role in defining social affiliations, particularly during adolescence, where they appear to function as a &#x2018;badge of identity&#x2019; within a social group (<xref ref-type="bibr" rid="B63">North and Hargreaves, 1999</xref>; <xref ref-type="bibr" rid="B61">Mulder et al., 2010</xref>). 
&#x2018;Fans&#x2019; of a particular performer would be expected to enjoy musical performances by that performer, in part because the familiarity gained from repeated exposure to recordings of their music would be expected to increase enjoyment of the performer&#x2019;s music in general (<xref ref-type="bibr" rid="B73">Schellenberg et al., 2008</xref>; <xref ref-type="bibr" rid="B90">van den Bosch et al., 2013</xref>). To examine the effect of audience members&#x2019; prior preferences for the band, we recruited fans of the performer Ian Fletcher Thornley, along with na&#x00EF;ve listeners who expressed no particular preference for the performer. Since the album had not yet been released prior to the concerts, the effects of song familiarity were controlled while examining differences between fans and neutral listeners as neither group had heard the songs prior to the concerts.</p>
<p>In sum, we examined the effects of live versus pre-recorded music and fan status on audience engagement with the music through head movements. Self-reported Fans and Neutral-preference listeners were separately recruited, and randomly assigned to attend one of two concerts. The concerts served as the record release event for Canadian rock star Ian Fletcher Thornley&#x2019;s 2015 solo album <italic>Secrets</italic>, featuring new unreleased music. In the Live concert, audience members experienced a live performance by the musicians, while in the Album-playback concert, listeners heard an audio recording of the same songs from the <italic>Secrets</italic> album. Both concerts were held in the LIVELab, a 106-seat performance hall equipped with a 25-camera optical motion capture system. Head movements of participants were recorded simultaneously throughout each of the two concerts (Supplementary Figure <xref ref-type="supplementary-material" rid="SM1">S1</xref>). Two aspects of head movement were examined: (1) vigour and (2) entrainment to the beat of the music. We hypothesised that head movements would be faster and better entrained when audiences experienced a live concert compared to a pre-recorded version of the music. We further hypothesised that fans of the performer would exhibit faster movement, and entrain better to the rhythm, compared to neutral listeners.</p>
</sec>
<sec id="s1" sec-type="materials|methods">
<title>Materials and Methods</title>
<sec><title>Participants</title>
<p>Fans of the performer were recruited through contests advertised in social media (<italic>n</italic> = 39). Neutral-listeners who expressed no specific preference for Ian Fletcher Thornley (<italic>n</italic> = 21) were recruited for course credit through McMaster University&#x2019;s online research portal (<italic>n</italic> = 3), social media and flyers circulated across campus and in music stores (<italic>n</italic> = 18). Self-asserted Fan-status was verified via a follow-up questionnaire. Participants&#x2019; demographics and condition assignments are described in Table <xref ref-type="table" rid="T1">1</xref>. Prior to analysis, five participants were excluded due to: self-reported abnormal hearing (<italic>n</italic> = 1 from Live/Neutral-listener condition), movement restrictions (<italic>n</italic> = 1 from Album-playback/Fan condition), or having previously heard songs from the album (<italic>n</italic> = 3; 1 from Album-playback/Fan, 2 from Live/Fan conditions). Six participants who did not respond to a follow-up survey confirming fan-status were further excluded: 1 from Album playback/Fan, 2 from Album-playback/Neutral-listener, 2 from Live/Fan, 1 from Live/Neutral-listener conditions. The final sample consisted of 32 Fans and 17 Neutral-listeners. The McMaster University Research Ethics Board approved all procedures.</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Participant demographics.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">Performer condition</th>
<th valign="top" align="center">Listener preference</th>
<th valign="top" align="center"><italic>N</italic></th>
<th valign="top" align="center">Gender (female, male)</th>
<th valign="top" align="center">Mean age (years)</th>
<th valign="top" align="center">Age range</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Present (live concert)</td>
<td valign="top" align="center">Fans</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">7, 8</td>
<td valign="top" align="center">39.7</td>
<td valign="top" align="center">28&#x2013;53</td>
</tr>
<tr>
<td valign="top" align="left"></td>
<td valign="top" align="center">Neutral</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">3, 6</td>
<td valign="top" align="center">25.2</td>
<td valign="top" align="center">19&#x2013;50</td>
</tr>
<tr>
<td valign="top" align="left">Absent (pre-recorded)</td>
<td valign="top" align="center">Fans</td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">7, 10</td>
<td valign="top" align="center">31.8</td>
<td valign="top" align="center">19&#x2013;51</td>
</tr>
<tr>
<td valign="top" align="left"></td>
<td valign="top" align="center">Neutral</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">5, 3</td>
<td valign="top" align="center">29.8</td>
<td valign="top" align="center">19&#x2013;57</td>
</tr>
<tr>
<td valign="top" align="left">Total</td>
<td valign="top" align="center"></td>
<td valign="top" align="center">49</td>
<td valign="top" align="center">22, 27</td>
<td valign="top" align="center">31.63</td>
<td valign="top" align="center">19&#x2013;57</td></tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec><title>Stimuli and Apparatus</title>
<p>Ian Fletcher Thornley&#x2019;s record release party concert was the setting for this study. Participants listened to eight songs from Thornley&#x2019;s new studio album <italic>Secrets</italic> on the day of its official release. This release reached a top position of 9 on the Canadian iTunes sales charts on October 30th, 2015. The first seven songs were novel to all included participants. The final song in the concert, &#x201C;Blown Wide Open,&#x201D; was a cover version of a previous song that was familiar to fans<sup><xref ref-type="fn" rid="fn01">1</xref></sup>. The eight songs were presented in the following order in both conditions: (1) &#x201C;Just to Know I Can&#x201D;; (2) &#x201C;How Long&#x201D;; (3) &#x201C;Fool&#x201D;; (4) &#x201C;Elouise&#x201D;; (5) &#x201C;Frozen Pond&#x201D;; (6) &#x201C;Feel&#x201D;; (7) &#x201C;Secrets&#x201D;; and (8) reinterpretation of &#x201C;Blown Wide Open&#x201D;. These stimuli are hereafter referred to as Songs 1 through 8, respectively.</p>
<p>Both the Live and Album-playback concerts took place in the LIVELab<sup><xref ref-type="fn" rid="fn02">2</xref></sup>. The LIVELab is a research facility with a 106-seat performance hall designed for the study of human interaction in a variety of ecologically valid contexts, including music, dance and pedagogy. In both Live and Album-playback concerts, motion-recorded Fans and Neutral-listeners were seated interspersed in the front and centre of the audience across four rows with an average of 8 people per row. Sound for both concerts was presented over a high-quality Meyer Sound 6 channel house PA system (Left/Right Main Speakers, Meyer UPJ, Left/Right Front Fill, Meyer UP4, Left/Right Subwoofer, Meyer 500-HP). Reverberation was added to each instrument in the Live Concert via a Digico SD9 sound mixer. A sound technician manipulated volume and reverberation throughout the live concert as it would be at a professional live show. For the Live Concert condition, Thornley (vocals and electric guitar) and his band (electric bass, drums, and cigar box guitar) performed renditions of the 8 songs in the same order as they were presented in the Album-playback concert condition. Given that it was a live performance, there were minor variations in tempo and arrangement between the stimuli at the Live compared to Album-playback Concerts, as would be expected in any live performance of a recorded work (see Supplementary Table <xref ref-type="supplementary-material" rid="SM1">S1</xref> in the <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref> for a comparison of the tempi of the pre-recorded and live songs). Coloured stage lights helped create the concert experience. Videos depicting a variety of neuroscience-themed phenomena played behind the performers on the stage video wall (3 &#x00D7; 3 array of Mitsubishi LM55S 55&#x2033; monitors) during the Live concert. 
In song 6, &#x201C;Feel,&#x201D; a video depiction of Thornley&#x2019;s neural responses&#x2014;previously recorded while he listened to the recording of his own song &#x201C;Feel&#x201D; and imaged from fMRI and EEG data&#x2014;was displayed. Referred to as &#x201C;Lightning Brain,&#x201D; the 5-min video can be viewed online<sup><xref ref-type="fn" rid="fn03">3</xref></sup>.</p>
<p>In the Album-playback concert, a photo of the <italic>Secrets</italic> album artwork was displayed on the stage video wall and the stage was dimly lit with coloured lights. The stage setup was identical for the two conditions; all of the instruments were in place and ready for performance. During Song 6 the video depiction of Thornley&#x2019;s neural responses was displayed as in the Live concert. See Supplementary Table <xref ref-type="supplementary-material" rid="SM1">S1</xref> for the tempi of the recorded and live songs.</p>
</sec>
<sec><title>Design and Procedure</title>
<p>The experimental design was a 2 &#x00D7; 2 &#x00D7; 7 with between-subjects factors Concert-status (Live, Album-playback) and Listener-preference (Fan, Neutral-listener) and within-subject factor Song (1, 2, 3, 4, 5, 6, and 7). The 8th song was analysed separately with only the between-subjects factors since it was familiar to Fans.</p>
<p>Fans and Neutral-listeners were randomly assigned to the Live or Album-playback conditions. In both cases participants were greeted at the entrance, filled out a consent form, and were fitted with a motion-capture cap. The caps did not restrict listener movement in any way. Participants were ushered into the theatre and to their seat. Once seated, additional audience members who did not participate in the study were then admitted to the theatre. Two researchers thanked the participants for their attendance and introduced the concert. Participants were instructed to do their best to forget that they were wearing caps and to enjoy the concert as they normally would. They were given no further instructions and were not encouraged to move in any particular way. Participants then completed a questionnaire on their familiarity with the performers, their current state of arousal and happiness, and their musical expertise (see Appendix <xref ref-type="supplementary-material" rid="SM1">S1</xref> in <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>). A follow-up questionnaire at the end of the concert asked the same questions regarding listener arousal and happiness.</p>
<p>Both concerts (Live, Album-playback) took place on the same day, with the Album-playback concert in the afternoon and the Live concert in the evening. During the Live concert, Thornley occasionally spoke to the audience between songs as performers would at a typical concert. Head movements between songs were not analysed. At the end of the Album-playback condition, Thornley and his band played a live song to avoid disappointing fans; head motion during this song was not analysed. A second questionnaire was sent to participants after the experiment to collect participant demographic information including age, sex, detailed music and dance experience and preferences.</p>
</sec>
<sec><title>Data Recording and Analysis</title>
<p>An audio recording of the live performance was recorded for later analysis. A passive optical motion capture system (24 Oqus 5+ cameras and an Oqus 210c video camera, Qualisys) recorded the head movements of participants at 90 Hz. Four retroreflective markers (10 mm) were placed on felt caps worn by the participants, forming a rigid body. One marker was placed on the front of the head, one on top of the head, and one on each temple.</p>
<p>Motion capture data were cleaned and labelled using the Qualisys Track Manager, then exported to MATLAB (The MathWorks Inc., 2015) for analysis with the motion capture toolbox (<xref ref-type="bibr" rid="B9">Burger and Toiviainen, 2013</xref>). Motion data were gap-filled using linear interpolation, then low-pass filtered at 6 Hz to remove jitter. The positions of the four head markers were averaged to produce a single, stable representation of participant head centre (Supplementary Figure <xref ref-type="supplementary-material" rid="SM1">S1</xref>). Data were then normalised and segmented into songs. After preparation, two measures of participant head motion were generated.</p>
<sec><title>Movement Vigour</title>
<p>The average movement speed of each participant in mm/s was calculated to provide a representation of movement vigour (<xref ref-type="bibr" rid="B55">Mazzoni et al., 2007</xref>; <xref ref-type="bibr" rid="B99">Zentner and Eerola, 2010</xref>; <xref ref-type="bibr" rid="B50">Leow et al., 2014</xref>). The speed of participants&#x2019; movements was estimated by taking the first derivative of the motion signal (differences in position between adjacent frames). Speed trajectories were then smoothed using a second-order lowpass Butterworth filter with a normalised low-pass frequency of 0.2&#x03C0; radians per sample. At a sampling frequency of 90 Hz, this equated to a 9 Hz low-pass filter. Movement vigour is conceptually independent of synchronisation; a participant could remain in perfect synchrony to a given tempo and still move with more or less vigour (e.g., by increasing or decreasing the distance they moved their head), and a participant could also remain completely unsynchronised and still move with more or less vigour.</p>
</sec>
<sec><title>Degree of Entrainment</title>
<p>The degree of entrainment was defined as how frequently participants entrained their movements to the beat of each song. Movement periodicities were extracted with a windowed autocorrelation performed on listeners&#x2019; head-centre motion trajectories, with window size of 10 s, hop size of 5 s, and lags ranging from 0 to 2 s using <italic>mcwindow</italic> and <italic>mcperiod</italic> functions from the Mocap Toolbox (<xref ref-type="bibr" rid="B26">Eerola et al., 2006</xref>; <xref ref-type="bibr" rid="B9">Burger and Toiviainen, 2013</xref>). The tempi of the songs from both the Live and Album-playback concerts were determined by two musically trained raters (first and third authors, <italic>n</italic> = 9 and <italic>n</italic> = 15 years of formal training, respectively) who tapped along to the beat of each song while listening to the recordings of the album and the Live concert using a metronome application (Metronome Beats, Stonekick&#x00A9;2015). The average inter-beat interval period was calculated from the song tempo, and this period was used to calculate the period at the quarter, half, and whole note levels of the musical metrical hierarchy for each song at which participants could have entrained. The participants&#x2019; head movement period at each window, obtained from the autocorrelation analysis, was compared to the three possible periods of each song. If the participant&#x2019;s period of motion was within 5% of one of these beat periods, then that window was added to a count of the number of windows demonstrating entrainment. The measure of degree of entrainment was defined as the number of windows with entrainment divided by the total number of possible windows, to give the proportion of entrainment, which could range between 0.0 (no entrainment) and 1.0 (perfect entrainment). Actual measured proportions ranged from 0.0 to 0.58 depending on the participant and song. 
Our overall grand mean entrainment proportion of 0.081 was smaller, but of similar magnitude, to that found by <xref ref-type="bibr" rid="B8">Burger et al. (2014)</xref> who showed period-locking proportions less than 0.3 (summing tactus divisions and excluding inferior-superior movement, which our seated participants were not free to engage in). Smaller values would be expected in our case, given that for the <xref ref-type="bibr" rid="B8">Burger et al. (2014)</xref> experiment participants were standing and specifically asked to move to the music, whereas in the present study participants were seated and were not given any instructions regarding movement.</p>
</sec>
</sec></sec>
<sec><title>Results</title>
<sec><title>Analyses of the First Seven Unfamiliar Songs</title>
<p>Movement vigour and degree of entrainment were analysed with repeated measures ANOVAs, with between-subjects factors Concert-status (Live, Album-playback) and Listener-preference (Fan, Neutral-listener), and within-subjects factor Song (1, 2, 3, 4, 5, 6, and 7). When Mauchly&#x2019;s test indicated that sphericity was violated, Greenhouse-Geisser&#x2019;s corrections were applied. Effect sizes are reported with partial eta-squared values, means are accompanied by a variance measure of one standard error of the mean (<italic>SEM</italic>). Pairwise comparisons were adjusted using Bonferroni correction. Statistical tests were conducted in SPSS 2013 v20.0.0. Experiment-wise corrections were not implemented on the reported values, but below we note the two cases in which such a correction would affect interpretation of an effect as significant.</p>
<sec><title>Concert-Status</title>
<p>There was a main effect of Concert-status for vigour, but not for entrainment, <italic>F</italic>(1,45) = 15.783, <italic>p</italic> &#x003C; 0.001, <inline-formula><mml:math id="M1"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.260 and <italic>F</italic>(1,45) = 1.569, <italic>p</italic> = 0.217, <inline-formula><mml:math id="M2"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.034, respectively. Participants moved more vigorously in the Live concert (<italic>M</italic> = 15.559, <italic>SEM</italic> = 1.397) than the Album-playback concert (<italic>M</italic> = 7.644, <italic>SEM</italic> = 1.421) condition. These results indicate that the Live concert increased vigour but not necessarily the degree of entrainment of head movements. The interaction between Concert-status and Listener-preference was not significant for either vigour or entrainment.</p>
</sec>
<sec><title>Listener-Preference</title>
<p>As predicted, there was a main effect of Listener-preference for both vigour and entrainment, <italic>F</italic>(1,45) = 12.871, <italic>p</italic> = 0.001, <inline-formula><mml:math id="M3"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.222, and <italic>F</italic>(1,45) = 4.197, <italic>p</italic> = 0.046, <inline-formula><mml:math id="M4"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.085, respectively. (Note that the effect of Listener-preference on entrainment is no longer significant if experiment-wise Bonferroni correction for multiple comparisons is implemented). Fans (<italic>M</italic> = 15.175, <italic>SEM</italic> = 1.174) moved faster than Neutral-listeners (<italic>M</italic> = 8.027, <italic>SEM</italic> = 1.610) and Fans (<italic>M</italic> = 0.074, <italic>SEM</italic> = 0.007) showed a higher degree of entrainment than Neutral-listeners (<italic>M</italic> = 0.050, <italic>SEM</italic> = 0.01). These results indicate that Listener-preference affected both vigour and entrainment of head movements. The interaction between Concert-status and Listener-preference was not significant for either vigour or entrainment.</p>
</sec>
<sec><title>Song</title>
<p>In addition to the main effects produced by the between-subjects variables, there was a main effect of Song for both vigour and entrainment, <italic>F</italic>(4.439,199.768) = 9.626, <italic>p</italic> &#x003C; 0.001, <inline-formula><mml:math id="M5"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.176 and <italic>F</italic>(3.254,146.414) = 19.022, <italic>p</italic> &#x003C; 0.001, <inline-formula><mml:math id="M6"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.297, respectively. This indicates substantial differences between songs in their ability to produce both fast and entrained movement, likely due to intrinsic properties of the songs, such as tempo (see Figures <xref ref-type="fig" rid="F1">1</xref>, <xref ref-type="fig" rid="F2">2</xref>; song tempi are provided in Supplementary Table <xref ref-type="supplementary-material" rid="SM1">S1</xref> in the <xref ref-type="supplementary-material" rid="SM1">Supplementary Material</xref>). Interestingly, songs producing the fastest movement were not necessarily the same songs that produced maximal entrainment, indicating the possibility of some level of independence between these two measures. An acoustic analysis of the songs from both performances is underway as a separate paper in which we plan to relate head movements to characteristics such as Danceability, Energy, Instrumentalness, Liveness, and Valence of individual songs.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>Vigour of head movements across songs. The distance travelled within a song was divided by the total length of the song, giving a value in millimetres per second. Fans moved with greater vigour than Neutral-listeners for every song and those in the Live Concert condition moved with greater vigour than those in the Album-playback Concert condition for every song. Vigour varied among songs, and was qualified depending on Concert-status (Live, Album-playback). The songs were: (1) &#x201C;Just to Know I Can&#x201D;; (2) &#x201C;How Long&#x201D;; (3) &#x201C;Fool&#x201D;; (4) &#x201C;Elouise&#x201D;; (5) &#x201C;Frozen Pond&#x201D;; (6) &#x201C;Feel&#x201D;; (7) &#x201C;Secrets&#x201D;; and (8) reinterpretation of &#x201C;Blown Wide Open.&#x201D; The violin plots show the same parameters as a standard box plot (range, interquartile range and median) as well as a kernel density plot that estimates the continuous distribution of the data.</p></caption>
<graphic xlink:href="fpsyg-09-02682-g001.tif"/>
</fig>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Proportion of movement entrainment across songs. Fans generally showed a higher degree of entrainment to the tempo of the music than Neutral-listeners. However, there was variation among songs, which interacted with Concert-status. The songs were: (1) &#x201C;Just to Know I Can&#x201D;; (2) &#x201C;How Long&#x201D;; (3) &#x201C;Fool&#x201D;; (4) &#x201C;Elouise&#x201D;; (5) &#x201C;Frozen Pond&#x201D;; (6) &#x201C;Feel&#x201D;; (7) &#x201C;Secrets&#x201D;; and (8) reinterpretation of &#x201C;Blown Wide Open.&#x201D; The violin plots show the same parameters as a standard box plot (range, interquartile range and median) as well as a kernel density plot that estimates the continuous distribution of the data.</p></caption>
<graphic xlink:href="fpsyg-09-02682-g002.tif"/>
</fig>
<p>There was also an interaction between song and Listener-preference for both vigour and entrainment, <italic>F</italic>(4.439,199.768) = 2.428, <italic>p</italic> = 0.003, <inline-formula><mml:math id="M7"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.082, and <italic>F</italic>(3.254,146.414) = 3.010, <italic>p</italic> = 0.029, <inline-formula><mml:math id="M8"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.063, respectively. This interaction indicates that Fans and Neutral-listeners reacted differently to different songs (It should be noted that the interaction between song and Listener-preference on entrainment is no longer significant if experiment-wise Bonferroni correction for multiple comparisons is implemented).</p>
</sec>
</sec>
<sec><title>Analyses of the 8th Song</title>
<p>The final song (&#x201C;Blown Wide Open,&#x201D; released in 1997) was analysed separately because it was familiar to Thornley&#x2019;s fans, having been one of the most famous songs from his previous band Big Wreck. This provides a preliminary exploration of how familiarity can promote movement.</p>
<sec><title>Concert-Status</title>
<p>There was a main effect of Concert-status on vigour, <italic>F</italic>(1,45) = 16.929, <italic>p</italic> &#x003C; 0.001, <inline-formula><mml:math id="M9"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.273. Movement was more vigorous in the Live concert (<italic>M</italic> = 20.32 mm/s, <italic>SEM</italic> = 2.003) than Album-playback concert (<italic>M</italic> = 8.56 mm/s, <italic>SEM</italic> = 2.037) condition. There was also a main effect on entrainment, <italic>F</italic>(1,45) = 11.917, <italic>p</italic> = 0.001, <inline-formula><mml:math id="M10"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.209. The degree of entrainment was higher in the Live concert (<italic>M</italic> = 0.235, SEM = 0.029) than Album-playback concert (<italic>M</italic> = 0.091, SEM = 0.030) condition.</p>
</sec>
<sec><title>Listener-Preference</title>
<p>For Listener-preference, there was a main effect on vigour, <italic>F</italic>(1,45) = 14.494, <italic>p</italic> &#x003C; 0.001, <inline-formula><mml:math id="M11"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.244. Fans (<italic>M</italic> = 19.88 mm/s, <italic>SEM</italic> = 1.683) moved faster than Neutral-listeners (<italic>M</italic> = 9.00 mm/s, <italic>SEM</italic> = 2.308). There was also a main effect on entrainment, <italic>F</italic>(1,45) = 13.630, <italic>p</italic> = 0.001, <inline-formula><mml:math id="M12"><mml:msubsup><mml:mo>&#x03B7;</mml:mo><mml:mo>p</mml:mo><mml:mn mathvariant='normal' mathcolor='black'>2</mml:mn></mml:msubsup></mml:math></inline-formula> = 0.232. Fans (<italic>M</italic> = 0.24, <italic>SEM</italic> = 0.025) entrained to a greater degree than Neutral-listeners (<italic>M</italic> = 0.086, <italic>SEM</italic> = 0.034). The interaction between Concert-status and Listener-preference was not significant.</p>
</sec>
</sec>
<sec><title>Musicians Versus Non-musicians</title>
<p>Using the self-reported measures of music experience, participants were categorised as musicians (<italic>N</italic> = 25; mean years of training = 11.7; range = 1&#x2013;38) or non-musicians with no musical training (<italic>N</italic> = 24). Independent-samples <italic>t</italic>-tests were performed for vigour for the mean of Songs 1&#x2013;7, <italic>t</italic>(47) = 0.6, <italic>p</italic> = 0.58, vigour for Song 8, <italic>t</italic>(47) = 0.4, <italic>p</italic> = 0.68, entrainment for the mean of Songs 1&#x2013;7, <italic>t</italic>(47) = 0.5, <italic>p</italic> = 0.62, and entrainment for Song 8, <italic>t</italic>(47) = 0.8, <italic>p</italic> = 0.45. There were no significant differences on any of these measures.</p>
</sec>
</sec>
<sec><title>Discussion</title>
<p>The question of why people enjoy attending live concerts, when the same music can be experienced more easily and for less money at home, likely involves two aspects: the social sharing of the experience in a group of people; and &#x201C;live&#x201D; aspects, including connecting with the artists and experiencing the potential for spontaneity and unpredictability of live music as it unfolds over time, compared to a pre-recorded and unchanging version on a recording that a fan might become familiar with after repeated listening. In our study, we examined primarily the second aspect, comparing listening to a recording of a set of songs from Ian Fletcher Thornley&#x2019;s 2015 album <italic>Secrets</italic> to listening to a live performance of those songs, while keeping the social aspect largely the same: both the Live and Album-playback concerts were experienced in the context of an audience in the same LIVELab venue. In the case of this study, audiences were not familiar with recorded songs, but nonetheless may have reacted to the knowledge that the music in the Live condition was unfolding in a unique way that would never be repeated exactly. Necessarily, the visual stimulation differed between the two conditions because of the presence of the live performers. We feel that this is not necessarily a confound&#x2014;a live performance requires the presence of performers&#x2014;but future studies might incorporate some visual stimulation that tries to better equate the two conditions, for example, by showing a video of the live performance. We also examined how being a fan of the musical group affected these experiences by comparing self-reported Fans and Neutral-listeners randomly assigned to the Live and Album-playback concert conditions. 
We focused on head movements, using motion capture to extract the vigour and degree of entrainment of head movements to the beat of music (<xref ref-type="bibr" rid="B85">Toiviainen et al., 2010</xref>; <xref ref-type="bibr" rid="B7">Burger et al., 2013</xref>).</p>
<p>We found that for both Fans and Neutral-listeners, head movements were more vigorous in the Live than the Album-playback concert, but Concert-status did not affect degree of entrainment to the beat. On the other hand, across both concert conditions, Fans moved their heads more vigorously and with better entrainment to the beat compared to Neutral-listeners. The greater degree of entrainment to the beat in general in Fans likely reflects their greater familiarity with the artist&#x2019;s musical style. The greater vigour of head movements across groups at the Live compared to Album-playback concert likely represents greater arousal, increased anticipation, and increased connection with the artists and their music during the live concert (<xref ref-type="bibr" rid="B55">Mazzoni et al., 2007</xref>; <xref ref-type="bibr" rid="B50">Leow et al., 2014</xref>). Amount of musical training varied across audience members, but there were no differences between musicians and non-musicians in either movement vigour or synchronisation to the beat. Similarly, <xref ref-type="bibr" rid="B4">Bernardi et al. (2017)</xref> reported that musical training did not affect the degree of synchronisation of autonomic responses to the beat of music experienced in a group setting. Together, these results suggest that entrainment responses in audiences are independent of musical training.</p>
<p>We controlled for song familiarity across Fans and Neutral-listeners by using songs that had not yet been publicly released (the first 7 songs of the concerts). The eighth song, &#x201C;Blown Wide Open,&#x201D; on the other hand, was certainly familiar to Fans, and may have been familiar to some Neutral-listeners as its original rendition had achieved double platinum sales in Canada in the late 1990s. Interestingly, when the songs were not familiar, there was no difference in degree of entrainment to the music across the Live and Album-playback concerts. However, for the eighth song that was familiar at least to Fans, head movement entrainment was greater during the Live than Album-playback concert. This suggests that while the vigour of head movements is affected by whether the music is live or pre-recorded regardless of familiarity, familiarity with the music may foster greater entrainment to the beat during live compared to recorded contexts.</p>
<p>Vigour of head movements and degree of entrainment differed across songs. Further, there were interactions for both measures between Songs and Listener-preference, indicating that Fans and Neutral-listeners reacted differently to different songs. This suggests that some songs might excite existing fans differently than na&#x00EF;ve listeners, which might inform record company promotion decisions. Concerts are becoming increasingly important for the music industry as the prevalence of piracy results in reduced revenue from album recordings (<xref ref-type="bibr" rid="B34">Frith, 2007</xref>; <xref ref-type="bibr" rid="B65">Papies and van Heerde, 2017</xref>). Interestingly, the majority of audience members report that cost does not influence their decisions to attend concerts (<xref ref-type="bibr" rid="B6">Brown and Knox, 2017</xref>). In general, research on audience development and retention could be important for sustaining the multi-billion dollar music industry (<xref ref-type="bibr" rid="B64">O&#x2019;Reilly et al., 2014</xref>; <xref ref-type="bibr" rid="B65">Papies and van Heerde, 2017</xref>).</p>
<p>Music compels us to move, the likely result of connections between auditory and motor areas of the brain (<xref ref-type="bibr" rid="B71">Sakai et al., 1999</xref>; <xref ref-type="bibr" rid="B41">Janata and Grafton, 2003</xref>; <xref ref-type="bibr" rid="B37">Grahn and Brett, 2007</xref>; <xref ref-type="bibr" rid="B98">Zatorre et al., 2007</xref>; <xref ref-type="bibr" rid="B38">Grahn and Rowe, 2009</xref>; <xref ref-type="bibr" rid="B42">Janata et al., 2012</xref>), whose communication during rhythm and beat prediction can be measured in neural oscillations (<xref ref-type="bibr" rid="B36">Fujioka et al., 2012</xref>). Certain characteristics of music lead to increased entrainment to music and compulsion of movement, such as beat predictability and rhythmic complexity (<xref ref-type="bibr" rid="B32">Fitch, 2016</xref>), the density of events between beats (<xref ref-type="bibr" rid="B53">Madison et al., 2011</xref>), moderate levels of syncopation (<xref ref-type="bibr" rid="B93">Witek et al., 2014</xref>; <xref ref-type="bibr" rid="B32">Fitch, 2016</xref>), and possibly micro-timing deviations (cf. <xref ref-type="bibr" rid="B53">Madison et al., 2011</xref>; <xref ref-type="bibr" rid="B24">Davies et al., 2013</xref>; <xref ref-type="bibr" rid="B82">Stupacher et al., 2013</xref>; <xref ref-type="bibr" rid="B44">Kilchenmann and Senn, 2015</xref>). The present study demonstrates that in addition to acoustic characteristics of music, environmental and personal factors influence movement to music as well. Specifically, familiarity with the performer and musical style (Listener-preference) led to increased movement and entrainment, while the live performance (Concert-status) led to a significant increase in movement vigour. 
Because synchronous movement can lead to prosociality (<xref ref-type="bibr" rid="B40">Hove and Risen, 2009</xref>; <xref ref-type="bibr" rid="B92">Wiltermuth and Heath, 2009</xref>; <xref ref-type="bibr" rid="B89">Valdesolo et al., 2010</xref>; <xref ref-type="bibr" rid="B88">Valdesolo and DeSteno, 2011</xref>; <xref ref-type="bibr" rid="B47">Launay et al., 2013</xref>; <xref ref-type="bibr" rid="B17">Cirelli et al., 2014b</xref>; <xref ref-type="bibr" rid="B86">Trainor and Cirelli, 2015</xref>; <xref ref-type="bibr" rid="B69">Rennung and Goritz, 2016</xref>; <xref ref-type="bibr" rid="B95">Woolhouse et al., 2016</xref>), and because entrainment to music was fostered more by Listener-preference than Concert-status, it is possible that personal factors are more important than environmental factors for generating synchronous movement and subsequent prosociality.</p>
<p>This study adds to the fledgling literature examining music listening in concert settings (<xref ref-type="bibr" rid="B28">Egermann et al., 2011</xref>; <xref ref-type="bibr" rid="B75">Shoda and Adachi, 2012</xref>, <xref ref-type="bibr" rid="B76">2015</xref>, <xref ref-type="bibr" rid="B77">2016</xref>; <xref ref-type="bibr" rid="B31">Fancourt and Williamon, 2016</xref>; <xref ref-type="bibr" rid="B78">Shoda et al., 2016</xref>). It provides unique insight into how live music is experienced in ecologically valid conditions, and how that experience is expressed through body movement. Many questions that remain could be addressed in future research in the LIVELab, such as how individual differences in personality affect live concert experiences, how individuals in a concert setting are affected by the movements of those around them, the effects of different musical characteristics (e.g., tempo, instrumentation, presence of improvisation, genre), whether synchronous movements in a concert setting leads to increased prosociality and bonding, and how performers are affected by audiences.</p>
</sec>
<sec><title>Data Availability</title>
<p>The raw data supporting the conclusions of this manuscript will be made available on <ext-link ext-link-type="uri" xlink:href="https://zenodo.org/">https://zenodo.org/</ext-link> (search for &#x2018;LIVELab&#x2019;) by the authors, without undue reservation, to any qualified researcher.</p>
</sec>
<sec><title>Ethics Statement</title>
<p>This study was carried out in accordance with the recommendations of the Canadian Tri-Council Policy Statement: Ethical Conduct for Research Involving Humans (TCPS 2), with written informed consent from all subjects. All subjects gave written informed consent in accordance with the Declaration of Helsinki. The protocol was approved by the McMaster University Research Ethics Board.</p>
</sec>
<sec><title>Author Contributions</title>
<p>DS involved in data collection and analyses, and the preparation and review of the manuscript. DB involved in the research design, data collection and analyses, and the preparation and review of the manuscript. SL involved in motion data collection, statistical analyses, and review of the manuscript. JB involved in project and research design, recruitment, organisation, and data collection, and review of the manuscript. MW involved in the conception and organisation of the project including artist-Anthem coordination, research design, and review of the manuscript. SM-R involved in recruitment, data collection and review of the manuscript. LT involved in the conception and organisation of the project, research design, review of the statistical analyses, and preparation and review of the manuscript.</p>
</sec>
<sec><title>Conflict of Interest Statement</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
</body>
<back>
<fn-group>
<fn fn-type="financial-disclosure">
<p><bold>Funding.</bold> This research was funded by Anthem Records, a grant to LT from the Social Sciences and Humanities Research Council of Canada (435-2016-1442), and a grant to MW from the Canadian Foundation for Innovation (30524).</p>
</fn>
</fn-group>
<ack>
<p>We thank Ian Fletcher Thornley and his band for their generosity in participating in this study. We thank Andy Curran of Anthem Records for collaborating with the Digital Music Lab and coordinating the launch of the <italic>Secrets</italic> album at the LIVELab. We thank Dave Thompson and Ranil Sonnadara for their assistance on sound and lighting, and for their assistance in executing the concert. We are grateful to Alex Zaranek, Research Assistant in the Digital Music Lab, for creating the video &#x201C;Lightning Brain&#x201D; shown within the live concert; Mike Noseworthy recorded the MRI data used to render the 3D brain in the video. We also thank the many volunteers for their help throughout the concerts. Finally, we thank the many participants for volunteering their time.</p>
</ack>
<sec sec-type="supplementary material">
<title>Supplementary Material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2018.02682/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fpsyg.2018.02682/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.PDF" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Arnal</surname> <given-names>L. H.</given-names></name> <name><surname>Giraud</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>Cortical oscillations and sensory predictions.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>16</volume> <fpage>390</fpage>&#x2013;<lpage>398</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2012.05.003</pub-id> <pub-id pub-id-type="pmid">22682813</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Atkinson</surname> <given-names>A. P.</given-names></name> <name><surname>Dittrich</surname> <given-names>W. H.</given-names></name> <name><surname>Gemmell</surname> <given-names>A. J.</given-names></name> <name><surname>Young</surname> <given-names>A. W.</given-names></name></person-group> (<year>2004</year>). <article-title>Emotion perception from dynamic and static body expressions in point-light and full-light displays.</article-title> <source><italic>Perception</italic></source> <volume>33</volume> <fpage>717</fpage>&#x2013;<lpage>746</lpage>. <pub-id pub-id-type="doi">10.1068/p5096</pub-id> <pub-id pub-id-type="pmid">15330366</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Baxter-Moore</surname> <given-names>N.</given-names></name> <name><surname>Kitts</surname> <given-names>T. M.</given-names></name></person-group> (<year>2016</year>). <article-title>The live concert experience: an introduction.</article-title> <source><italic>Rock Music Stud.</italic></source> <volume>3</volume> <fpage>1</fpage>&#x2013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.1080/19401159.2015.1131923</pub-id> <pub-id pub-id-type="pmid">30319469</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bernardi</surname> <given-names>N. F.</given-names></name> <name><surname>Codrons</surname> <given-names>E.</given-names></name> <name><surname>di Leo</surname> <given-names>R.</given-names></name> <name><surname>Vandoni</surname> <given-names>M.</given-names></name> <name><surname>Cavallaro</surname> <given-names>F.</given-names></name> <name><surname>Vita</surname> <given-names>G.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>Increase in synchronization of autonomic rhythms between individuals when listening to music.</article-title> <source><italic>Front. Physiol.</italic></source> <volume>8</volume>:<issue>785</issue>. <pub-id pub-id-type="doi">10.3389/fphys.2017.00785</pub-id> <pub-id pub-id-type="pmid">29089898</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bradby</surname> <given-names>B.</given-names></name></person-group> (<year>2017</year>). <article-title>&#x201C;Performer-audience interaction in live concerts: ritual or conversation?,&#x201D; in</article-title> <source><italic>Musicians and their Audiences: Performance, Speech, and Mediation</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Tsioulakis</surname> <given-names>I.</given-names></name> <name><surname>Hytonen-Ng</surname> <given-names>E.</given-names></name></person-group> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>) <fpage>86</fpage>&#x2013;<lpage>104</lpage>.</citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brown</surname> <given-names>S. C.</given-names></name> <name><surname>Knox</surname> <given-names>D.</given-names></name></person-group> (<year>2017</year>). <article-title>Why go to pop concerts? The motivations behind live music attendance.</article-title> <source><italic>Music. Sci.</italic></source> <volume>21</volume> <fpage>233</fpage>&#x2013;<lpage>249</lpage>. <pub-id pub-id-type="doi">10.1177/1029864916650719</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burger</surname> <given-names>B.</given-names></name> <name><surname>Thompson</surname> <given-names>M. R.</given-names></name> <name><surname>Luck</surname> <given-names>G.</given-names></name> <name><surname>Saarikallio</surname> <given-names>S.</given-names></name> <name><surname>Toiviainen</surname> <given-names>P.</given-names></name></person-group> (<year>2013</year>). <article-title>Influences of rhythm- and timbre-related musical features on characteristics of music-induced movement.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>4</volume>:<issue>183</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2013.00183</pub-id> <pub-id pub-id-type="pmid">23641220</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burger</surname> <given-names>B.</given-names></name> <name><surname>Thompson</surname> <given-names>M. R.</given-names></name> <name><surname>Luck</surname> <given-names>G.</given-names></name> <name><surname>Saarikallio</surname> <given-names>S.</given-names></name> <name><surname>Toiviainen</surname> <given-names>P.</given-names></name></person-group> (<year>2014</year>). <article-title>Hunting for the beat in the body: on period and phase locking in music-induced movement.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>8</volume>:<issue>903</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2014.00903</pub-id> <pub-id pub-id-type="pmid">25426051</pub-id></citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burger</surname> <given-names>B.</given-names></name> <name><surname>Toiviainen</surname> <given-names>P.</given-names></name></person-group> (<year>2013</year>). <article-title>&#x201C;MoCap Toolbox - A Matlab toolbox for computational analysis of movement data,&#x201D; in</article-title> <source><italic>Proceedings of the Sound and Music Computing Conference 2013</italic> SMC 2013</source> <role>ed.</role> <person-group person-group-type="editor"><name><surname>Bresin</surname> <given-names>R.</given-names></name></person-group> (<publisher-loc>Stockholm</publisher-loc>: <publisher-name>Logos Verlag Berlin</publisher-name>) <fpage>172</fpage>&#x2013;<lpage>178</lpage>.</citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burland</surname> <given-names>K.</given-names></name> <name><surname>Pitts</surname> <given-names>S.</given-names></name></person-group> <comment>(eds)</comment> (<year>2014</year>). <source><italic>Coughing and Clapping: Investigating the Audience Experience.</italic></source> <publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>.</citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Calderone</surname> <given-names>D. J.</given-names></name> <name><surname>Lakatos</surname> <given-names>P.</given-names></name> <name><surname>Butler</surname> <given-names>P. D.</given-names></name> <name><surname>Castellanos</surname> <given-names>F. X.</given-names></name></person-group> (<year>2014</year>). <article-title>Entrainment of neural oscillations as a modifiable substrate of attention.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>18</volume> <fpage>300</fpage>&#x2013;<lpage>309</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2014.02.005</pub-id> <pub-id pub-id-type="pmid">24630166</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chan</surname> <given-names>L. P.</given-names></name> <name><surname>Livingstone</surname> <given-names>S. R.</given-names></name> <name><surname>Russo</surname> <given-names>F. A.</given-names></name></person-group> (<year>2013</year>). <article-title>Facial mimicry in response to song.</article-title> <source><italic>Music Percept.</italic></source> <volume>30</volume> <fpage>361</fpage>&#x2013;<lpage>367</lpage>. <pub-id pub-id-type="doi">10.1525/MP.2013.30.4.361</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chang</surname> <given-names>A.</given-names></name> <name><surname>Bosnyak</surname> <given-names>D. J.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2018a</year>). <article-title>Beta oscillatory power modulation reflects the predictability of pitch change.</article-title> <source><italic>Cortex</italic></source> <volume>106</volume> <fpage>248</fpage>&#x2013;<lpage>260</lpage>. <pub-id pub-id-type="doi">10.1016/j.cortex.2018.06.008</pub-id> <pub-id pub-id-type="pmid">30053731</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chang</surname> <given-names>A.</given-names></name> <name><surname>Kragness</surname> <given-names>H.</given-names></name> <name><surname>Tsou</surname> <given-names>W.</given-names></name> <name><surname>Thiede</surname> <given-names>A.</given-names></name> <name><surname>Bosnyak</surname> <given-names>D.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2018b</year>). <article-title>&#x201C;Speed dating in the LOVELab: The role of groovy music and body sway coupling in romantic attraction,&#x201D; in</article-title> <source><italic>Proceedings of the 47th Annual Lake Ontario Visionary Establishment (LOVE) Conference</italic></source> <publisher-loc>Niagara Falls, NY</publisher-loc>.</citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chang</surname> <given-names>A.</given-names></name> <name><surname>Livingstone</surname> <given-names>S. R.</given-names></name> <name><surname>Bosnyak</surname> <given-names>D. J.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2017</year>). <article-title>Body sway reflects leadership in joint music performance.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>114</volume> <fpage>E4134</fpage>&#x2013;<lpage>E4141</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1617657114</pub-id> <pub-id pub-id-type="pmid">28484007</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cirelli</surname> <given-names>L. K.</given-names></name> <name><surname>Bosnyak</surname> <given-names>D.</given-names></name> <name><surname>Manning</surname> <given-names>F. C.</given-names></name> <name><surname>Spinelli</surname> <given-names>C.</given-names></name> <name><surname>Marie</surname> <given-names>C.</given-names></name> <name><surname>Fujioka</surname> <given-names>T.</given-names></name><etal/></person-group> (<year>2014a</year>). <article-title>Beat-induced fluctuations in auditory cortical beta-band activity: using EEG to measure age-related changes.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>5</volume>:<issue>742</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.00742</pub-id> <pub-id pub-id-type="pmid">25071691</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cirelli</surname> <given-names>L. K.</given-names></name> <name><surname>Einarson</surname> <given-names>K. M.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2014b</year>). <article-title>Interpersonal synchrony increases prosocial behavior in infants.</article-title> <source><italic>Dev. Sci.</italic></source> <volume>17</volume> <fpage>1003</fpage>&#x2013;<lpage>1011</lpage>. <pub-id pub-id-type="doi">10.1111/desc.12193</pub-id> <pub-id pub-id-type="pmid">25513669</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cirelli</surname> <given-names>L. K.</given-names></name> <name><surname>Trehub</surname> <given-names>S. E.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2018</year>). <article-title>Rhythm and melody as social signals for infants.</article-title> <source><italic>Ann. N. Y. Acad. Sci.</italic></source> <pub-id pub-id-type="doi">10.1111/nyas.13580</pub-id> <comment>[Epub ahead of print]</comment>. <pub-id pub-id-type="pmid">29512877</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cirelli</surname> <given-names>L. K.</given-names></name> <name><surname>Wan</surname> <given-names>S. J.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2014c</year>). <article-title>Fourteen-month-old infants use interpersonal synchrony as a cue to direct helpfulness.</article-title> <source><italic>Phil. Trans. R. Soc. B</italic></source> <volume>369</volume>:<issue>20130400</issue>. <pub-id pub-id-type="doi">10.1098/rstb.2013.0400</pub-id> <pub-id pub-id-type="pmid">25385778</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cirelli</surname> <given-names>L. K.</given-names></name> <name><surname>Wan</surname> <given-names>S. J.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2016</year>). <article-title>Social effects of movement synchrony: increased infant helpfulness only transfers to affiliates of synchronously moving partners.</article-title> <source><italic>Infancy</italic></source> <volume>21</volume> <fpage>1</fpage>&#x2013;<lpage>15</lpage>. <pub-id pub-id-type="doi">10.1111/infa.12140</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Codrons</surname> <given-names>E.</given-names></name> <name><surname>Bernardi</surname> <given-names>N. F.</given-names></name> <name><surname>Vandoni</surname> <given-names>M.</given-names></name> <name><surname>Bernardi</surname> <given-names>L.</given-names></name></person-group> (<year>2014</year>). <article-title>Spontaneous group synchronization of movements and respiratory rhythms.</article-title> <source><italic>PLoS One</italic></source> <volume>9</volume>:<issue>e107538</issue>. <pub-id pub-id-type="doi">10.1371/journal.pone.0107538</pub-id> <pub-id pub-id-type="pmid">25216280</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cravo</surname> <given-names>A. M.</given-names></name> <name><surname>Rohenkohl</surname> <given-names>G.</given-names></name> <name><surname>Wyart</surname> <given-names>V.</given-names></name> <name><surname>Nobre</surname> <given-names>A. C.</given-names></name></person-group> (<year>2013</year>). <article-title>Temporal expectation enhances contrast sensitivity by phase entrainment of low-frequency oscillations in visual cortex.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>33</volume> <fpage>4002</fpage>&#x2013;<lpage>4010</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.4675-12.2013</pub-id> <pub-id pub-id-type="pmid">23447609</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Danielsen</surname> <given-names>A.</given-names></name> <name><surname>Helseth</surname> <given-names>I.</given-names></name></person-group> (<year>2016</year>). <article-title>Mediated immediacy: the relationship between auditory and visual dimensions of live performance in contemporary technology-based popular music.</article-title> <source><italic>Rock Music Stud.</italic></source> <volume>3</volume> <fpage>24</fpage>&#x2013;<lpage>40</lpage>.</citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Davies</surname> <given-names>M.</given-names></name> <name><surname>Madison</surname> <given-names>G.</given-names></name> <name><surname>Silva</surname> <given-names>P.</given-names></name> <name><surname>Gouyon</surname> <given-names>F.</given-names></name></person-group> (<year>2013</year>). <article-title>The effect of microtiming deviations on the perception of groove in short rhythms.</article-title> <source><italic>Music Percept.</italic></source> <volume>30</volume> <fpage>497</fpage>&#x2013;<lpage>510</lpage>. <pub-id pub-id-type="doi">10.1525/mp.2013.30.5.497</pub-id></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Deller</surname> <given-names>R.</given-names></name></person-group> (<year>2011</year>). <article-title>Twittering on: audience research and participation using Twitter.</article-title> <source><italic>Participations</italic></source> <volume>8</volume> <fpage>216</fpage>&#x2013;<lpage>245</lpage>.</citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eerola</surname> <given-names>T.</given-names></name> <name><surname>Luck</surname> <given-names>G.</given-names></name> <name><surname>Toiviainen</surname> <given-names>P.</given-names></name></person-group> (<year>2006</year>). <article-title>&#x201C;An investigation of pre-schoolers&#x2019; corporeal synchronization with music,&#x201D; in</article-title> <source><italic>Proceedings of the 9th International Conference Music Perception Cognition</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Baroni</surname> <given-names>M.</given-names></name> <name><surname>Addessi</surname> <given-names>A. R.</given-names></name> <name><surname>Caterina</surname> <given-names>R.</given-names></name> <name><surname>Costa</surname> <given-names>M.</given-names></name></person-group> (<publisher-loc>Bologna</publisher-loc>: <publisher-name>ICMPC</publisher-name>) <fpage>472</fpage>&#x2013;<lpage>476</lpage>.</citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Egermann</surname> <given-names>H.</given-names></name> <name><surname>Pearce</surname> <given-names>M. T.</given-names></name> <name><surname>Wiggins</surname> <given-names>G. A.</given-names></name> <name><surname>McAdams</surname> <given-names>S.</given-names></name></person-group> (<year>2013</year>). <article-title>Probabilistic models of expectation violation predict psychophysiological emotional responses to live concert music.</article-title> <source><italic>Cogn. Affect. Behav. Neurosci.</italic></source> <volume>13</volume> <fpage>533</fpage>&#x2013;<lpage>553</lpage>. <pub-id pub-id-type="doi">10.3758/s13415-013-0161-y</pub-id> <pub-id pub-id-type="pmid">23605956</pub-id></citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Egermann</surname> <given-names>H.</given-names></name> <name><surname>Sutherland</surname> <given-names>M. E.</given-names></name> <name><surname>Grewe</surname> <given-names>O.</given-names></name> <name><surname>Nagel</surname> <given-names>F.</given-names></name> <name><surname>Kopiez</surname> <given-names>R.</given-names></name></person-group> (<year>2011</year>). <article-title>Does music listening in a social context alter experience? A physiological and psychological perspective on emotion.</article-title> <source><italic>Music. Sci.</italic></source> <volume>15</volume> <fpage>307</fpage>&#x2013;<lpage>323</lpage>. <pub-id pub-id-type="doi">10.1177/1029864911399497</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>P.</given-names></name> <name><surname>Friesen</surname> <given-names>V. W.</given-names></name></person-group> (<year>1967</year>). <article-title>Head and body cues in the judgement of emotion: a reformulation.</article-title> <source><italic>Percept. Mot. Skills</italic></source> <volume>24</volume> <fpage>711</fpage>&#x2013;<lpage>724</lpage>. <pub-id pub-id-type="doi">10.2466/pms.1967.24.3.711</pub-id> <pub-id pub-id-type="pmid">5622764</pub-id></citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fancourt</surname> <given-names>D.</given-names></name> <name><surname>Steptoe</surname> <given-names>A.</given-names></name></person-group> (<year>2018</year>). <article-title>Cultural engagement predicts changes in cognitive function in older adults over a 10 year period: findings from the English Longitudinal Study of Ageing.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>8</volume>:<issue>10226</issue>. <pub-id pub-id-type="doi">10.1038/s41598-018-28591-8</pub-id> <pub-id pub-id-type="pmid">29977058</pub-id></citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fancourt</surname> <given-names>D.</given-names></name> <name><surname>Williamon</surname> <given-names>A.</given-names></name></person-group> (<year>2016</year>). <article-title>Attending a concert reduces glucocorticoids, progesterone and the cortisol/DHEA ratio.</article-title> <source><italic>Public Health</italic></source> <volume>132</volume> <fpage>101</fpage>&#x2013;<lpage>104</lpage>. <pub-id pub-id-type="doi">10.1016/j.puhe.2015.12.005</pub-id> <pub-id pub-id-type="pmid">26852282</pub-id></citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fitch</surname> <given-names>W. T.</given-names></name></person-group> (<year>2016</year>). <article-title>Dance, music, meter and groove: a forgotten partnership.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>10</volume>:<issue>64</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2016.00064</pub-id> <pub-id pub-id-type="pmid">26973489</pub-id></citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Freeman</surname> <given-names>W. J.</given-names></name></person-group> (<year>2000</year>). <article-title>&#x201C;A neurobiological role of music in social bonding,&#x201D; in</article-title> <source><italic>The Origins of Music</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Wallin</surname> <given-names>N.</given-names></name> <name><surname>Merkur</surname> <given-names>B.</given-names></name> <name><surname>Brown</surname> <given-names>S.</given-names></name></person-group> (<publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>MIT Press</publisher-name>) <fpage>411</fpage>&#x2013;<lpage>424</lpage>.</citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Frith</surname> <given-names>S.</given-names></name></person-group> (<year>2007</year>). <article-title>Live music matters.</article-title> <source><italic>Scot. Music Rev.</italic></source> <volume>1</volume> <fpage>1</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.1017/S0261143013000068</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fujioka</surname> <given-names>T.</given-names></name> <name><surname>Ross</surname> <given-names>B.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2015</year>). <article-title>Beta-band oscillations represent auditory beat and its metrical hierarchy in perception and imagery.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>35</volume> <fpage>15187</fpage>&#x2013;<lpage>15198</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2397-15.2015</pub-id></citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fujioka</surname> <given-names>T.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name> <name><surname>Large</surname> <given-names>E. W.</given-names></name> <name><surname>Ross</surname> <given-names>B.</given-names></name></person-group> (<year>2012</year>). <article-title>Internalized timing of isochronous sounds is represented in neuromagnetic beta oscillations.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>32</volume> <fpage>1791</fpage>&#x2013;<lpage>1802</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.4107-11.2012</pub-id> <pub-id pub-id-type="pmid">22302818</pub-id></citation></ref>
<ref id="B37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grahn</surname> <given-names>J. A.</given-names></name> <name><surname>Brett</surname> <given-names>M.</given-names></name></person-group> (<year>2007</year>). <article-title>Rhythm and beat perception in motor areas of the brain.</article-title> <source><italic>J. Cogn. Neurosci.</italic></source> <volume>19</volume> <fpage>893</fpage>&#x2013;<lpage>906</lpage>.</citation></ref>
<ref id="B38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grahn</surname> <given-names>J. A.</given-names></name> <name><surname>Rowe</surname> <given-names>J. B.</given-names></name></person-group> (<year>2009</year>). <article-title>Feeling the Beat: premotor and striatal interactions in musicians and nonmusicians during beat perception.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>29</volume> <fpage>7540</fpage>&#x2013;<lpage>7548</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2018-08.2009</pub-id> <pub-id pub-id-type="pmid">19515922</pub-id></citation></ref>
<ref id="B39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Harrigan</surname> <given-names>J.</given-names></name> <name><surname>Rosenthal</surname> <given-names>R.</given-names></name> <name><surname>Scherer</surname> <given-names>K.</given-names></name></person-group> (<year>2008</year>). <source><italic>The New Handbook of Methods in Nonverbal Behavior Research.</italic></source> <publisher-loc>Oxford</publisher-loc>: <publisher-name>Oxford university press</publisher-name>. <pub-id pub-id-type="doi">10.1093/acprof:oso/9780198529620.001.0001</pub-id></citation></ref>
<ref id="B40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hove</surname> <given-names>M. J.</given-names></name> <name><surname>Risen</surname> <given-names>J.</given-names></name></person-group> (<year>2009</year>). <article-title>It&#x2019;s all in the timing: interpersonal synchrony increases affiliation.</article-title> <source><italic>Soc. Cogn.</italic></source> <volume>27</volume> <fpage>949</fpage>&#x2013;<lpage>961</lpage>.</citation></ref>
<ref id="B41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Janata</surname> <given-names>P.</given-names></name> <name><surname>Grafton</surname> <given-names>S. T.</given-names></name></person-group> (<year>2003</year>). <article-title>Swinging in the brain: shared neural substrates for behaviors related to sequencing and music.</article-title> <source><italic>Nat. Neurosci.</italic></source> <volume>6</volume> <fpage>682</fpage>&#x2013;<lpage>687</lpage>. <pub-id pub-id-type="doi">10.1038/nn1081</pub-id> <pub-id pub-id-type="pmid">12830159</pub-id></citation></ref>
<ref id="B42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Janata</surname> <given-names>P.</given-names></name> <name><surname>Tomic</surname> <given-names>S. T.</given-names></name> <name><surname>Haberman</surname> <given-names>J. M.</given-names></name></person-group> (<year>2012</year>). <article-title>Sensorimotor coupling in music and the psychology of the groove.</article-title> <source><italic>J. Exp. Psychol. Gen.</italic></source> <volume>141</volume> <fpage>54</fpage>&#x2013;<lpage>75</lpage>. <pub-id pub-id-type="doi">10.1037/a0024208</pub-id> <pub-id pub-id-type="pmid">21767048</pub-id></citation></ref>
<ref id="B43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kang</surname> <given-names>G. E.</given-names></name> <name><surname>Gross</surname> <given-names>M. M.</given-names></name></person-group> (<year>2016</year>). <article-title>The effect of emotion on movement smoothness during gait in healthy young adults.</article-title> <source><italic>J. Biomech.</italic></source> <volume>0</volume> <fpage>261</fpage>&#x2013;<lpage>268</lpage>. <pub-id pub-id-type="doi">10.1016/j.jbiomech.2016.10.044</pub-id> <pub-id pub-id-type="pmid">27823805</pub-id></citation></ref>
<ref id="B44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kilchenmann</surname> <given-names>L.</given-names></name> <name><surname>Senn</surname> <given-names>O.</given-names></name></person-group> (<year>2015</year>). <article-title>Microtiming in swing and funk affects the body movement behavior of music expert listeners.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>6</volume>:<issue>1232</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2015.01232</pub-id> <pub-id pub-id-type="pmid">26347694</pub-id></citation></ref>
<ref id="B45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lakens</surname> <given-names>D.</given-names></name> <name><surname>Stel</surname> <given-names>M.</given-names></name></person-group> (<year>2011</year>). <article-title>If they move in sync, they must feel in sync: movement synchrony leads to attributions of rapport and entitativity.</article-title> <source><italic>Soc. Cogn.</italic></source> <volume>29</volume> <fpage>1</fpage>&#x2013;<lpage>14</lpage>. <pub-id pub-id-type="doi">10.1521/soco.2011.29.1.1</pub-id></citation></ref>
<ref id="B46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lamont</surname> <given-names>A.</given-names></name></person-group> (<year>2011</year>). <article-title>University students&#x2019; strong experiences of music: pleasure, engagement, and meaning.</article-title> <source><italic>Music. Sci.</italic></source> <volume>15</volume> <fpage>229</fpage>&#x2013;<lpage>249</lpage>. <pub-id pub-id-type="doi">10.1177/102986491101500206</pub-id></citation></ref>
<ref id="B47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Launay</surname> <given-names>J.</given-names></name> <name><surname>Dean</surname> <given-names>R. T.</given-names></name> <name><surname>Bailes</surname> <given-names>F.</given-names></name></person-group> (<year>2013</year>). <article-title>Synchronization can influence trust following virtual interaction.</article-title> <source><italic>Exp. Psychol.</italic></source> <volume>60</volume> <fpage>53</fpage>&#x2013;<lpage>63</lpage>. <pub-id pub-id-type="doi">10.1027/1618-3169/a000173</pub-id> <pub-id pub-id-type="pmid">22935329</pub-id></citation></ref>
<ref id="B48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leante</surname> <given-names>L.</given-names></name></person-group> (<year>2016</year>). <article-title>&#x201C;Observing musicians/audience interaction in North Indian classical music performance,&#x201D; in</article-title> <source><italic>Musicians and their Audiences: Performance, Speech, and Mediation</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Tsioulakis</surname> <given-names>I.</given-names></name> <name><surname>Hyt&#x00F6;nen-Ng</surname> <given-names>E.</given-names></name></person-group> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>) <fpage>50</fpage>&#x2013;<lpage>65</lpage>.</citation></ref>
<ref id="B49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leman</surname> <given-names>M.</given-names></name> <name><surname>God&#x00F8;y</surname> <given-names>R. I.</given-names></name></person-group> (<year>2010</year>). <source><italic>Musical Gestures: Sound, Movement, and Meaning.</italic></source> <publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>.</citation></ref>
<ref id="B50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leow</surname> <given-names>L.</given-names></name> <name><surname>Parrott</surname> <given-names>T.</given-names></name> <name><surname>Grahn</surname> <given-names>J. A.</given-names></name></person-group> (<year>2014</year>). <article-title>Individual differences in beat perception affect gait responses to low- and high-groove music.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>8</volume>:<issue>811</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2014.00811</pub-id> <pub-id pub-id-type="pmid">25374521</pub-id></citation></ref>
<ref id="B51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leow</surname> <given-names>L.</given-names></name> <name><surname>Rinchon</surname> <given-names>C.</given-names></name> <name><surname>Grahn</surname> <given-names>J. A.</given-names></name></person-group> (<year>2015</year>). <article-title>Familiarity with music increases walking speed in rhythmic auditory cuing.</article-title> <source><italic>Ann. N. Y. Acad. Sci.</italic></source> <volume>1337</volume> <fpage>53</fpage>&#x2013;<lpage>61</lpage>. <pub-id pub-id-type="doi">10.1111/nyas.12658</pub-id> <pub-id pub-id-type="pmid">25773617</pub-id></citation></ref>
<ref id="B52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Livingstone</surname> <given-names>S. R.</given-names></name> <name><surname>Palmer</surname> <given-names>C.</given-names></name></person-group> (<year>2016</year>). <article-title>Head movements encode emotions during speech and song.</article-title> <source><italic>Emotion</italic></source> <volume>16</volume> <fpage>365</fpage>&#x2013;<lpage>380</lpage>. <pub-id pub-id-type="doi">10.1037/emo0000106</pub-id> <pub-id pub-id-type="pmid">26501928</pub-id></citation></ref>
<ref id="B53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Madison</surname> <given-names>G.</given-names></name> <name><surname>Gouyon</surname> <given-names>F.</given-names></name> <name><surname>Ull&#x00E9;n</surname> <given-names>F.</given-names></name> <name><surname>H&#x00F6;rnstr&#x00F6;m</surname> <given-names>K.</given-names></name></person-group> (<year>2011</year>). <article-title>Modeling the tendency for music to induce movement in humans: first correlations with low-level audio descriptors across music genres.</article-title> <source><italic>J. Exp. Psychol. Hum. Percept. Perform.</italic></source> <volume>37</volume> <fpage>1578</fpage>&#x2013;<lpage>1594</lpage>. <pub-id pub-id-type="doi">10.1037/a0024323</pub-id> <pub-id pub-id-type="pmid">21728462</pub-id></citation></ref>
<ref id="B54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Madison</surname> <given-names>G.</given-names></name> <name><surname>Sioros</surname> <given-names>G.</given-names></name></person-group> (<year>2014</year>). <article-title>What musicians do to induce the sensation of groove in simple and complex melodies, and how listeners perceive it.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>5</volume>:<issue>894</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.00894</pub-id> <pub-id pub-id-type="pmid">25191286</pub-id></citation></ref>
<ref id="B55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mazzoni</surname> <given-names>P.</given-names></name> <name><surname>Hristova</surname> <given-names>A.</given-names></name> <name><surname>Krakauer</surname> <given-names>J. W.</given-names></name></person-group> (<year>2007</year>). <article-title>Why Don&#x2019;t we move faster? Parkinson&#x2019;s Disease, movement vigor, and implicit motivation.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>27</volume> <fpage>7105</fpage>&#x2013;<lpage>7116</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.0264-07.2007</pub-id></citation></ref>
<ref id="B56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>McAdams</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>Influences of large-scale form on continuous ratings in response to a contemporary piece in a live concert setting.</article-title> <source><italic>Music Percept.</italic></source> <volume>22</volume> <fpage>297</fpage>&#x2013;<lpage>350</lpage>. <pub-id pub-id-type="doi">10.1525/mp.2004.22.2.297</pub-id></citation></ref>
<ref id="B57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Merker</surname> <given-names>B. H.</given-names></name> <name><surname>Madison</surname> <given-names>G. S.</given-names></name> <name><surname>Eckerdal</surname> <given-names>P.</given-names></name></person-group> (<year>2009</year>). <article-title>On the role and origin of isochrony in human rhythmic entrainment.</article-title> <source><italic>Cortex</italic></source> <volume>45</volume> <fpage>4</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.1016/j.cortex.2008.06.011</pub-id> <pub-id pub-id-type="pmid">19046745</pub-id></citation></ref>
<ref id="B58"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Miyata</surname> <given-names>K.</given-names></name> <name><surname>Varlet</surname> <given-names>M.</given-names></name> <name><surname>Miura</surname> <given-names>A.</given-names></name> <name><surname>Kudo</surname> <given-names>K.</given-names></name> <name><surname>Keller</surname> <given-names>P. E.</given-names></name></person-group> (<year>2018</year>). <article-title>Interpersonal visual interaction induces local and global stabilisation of rhythmic coordination.</article-title> <source><italic>Neurosci. Lett.</italic></source> <volume>682</volume> <fpage>132</fpage>&#x2013;<lpage>136</lpage>. <pub-id pub-id-type="doi">10.1016/j.neulet.2018.07.024</pub-id> <pub-id pub-id-type="pmid">30031033</pub-id></citation></ref>
<ref id="B59"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Moreau</surname> <given-names>F.</given-names></name></person-group> (<year>2013</year>). <article-title>The disruptive nature of digitization: the case of the recorded music industry.</article-title> <source><italic>Int. J. Arts Manag.</italic></source> <volume>15</volume> <fpage>18</fpage>&#x2013;<lpage>31</lpage>.</citation></ref>
<ref id="B60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Morris</surname> <given-names>S. M.</given-names></name></person-group> (<year>2010</year>). <article-title>Achieving collective coherence: group effects on heart rate variability coherence and heart rhythm synchronization.</article-title> <source><italic>Altern. Ther.</italic></source> <volume>16</volume> <fpage>62</fpage>&#x2013;<lpage>72</lpage>.</citation></ref>
<ref id="B61"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mulder</surname> <given-names>J.</given-names></name> <name><surname>Ter Bogt</surname> <given-names>T.</given-names></name> <name><surname>Raaijmakers</surname> <given-names>Q. A. W.</given-names></name> <name><surname>Nic Gabhainn</surname> <given-names>S. N.</given-names></name> <name><surname>Sikkema</surname> <given-names>P.</given-names></name></person-group> (<year>2010</year>). <article-title>From death metal to R&#x0026;B? Consistency of music preferences among Dutch adolescents and young adults.</article-title> <source><italic>Psychol. Music</italic></source> <volume>38</volume> <fpage>67</fpage>&#x2013;<lpage>83</lpage>. <pub-id pub-id-type="doi">10.1177/0305735609104349</pub-id> <pub-id pub-id-type="pmid">22705367</pub-id></citation></ref>
<ref id="B62"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nettl</surname> <given-names>B.</given-names></name> <name><surname>Russell</surname> <given-names>M.</given-names></name></person-group> <comment>(eds)</comment> (<year>1998</year>). <source><italic>In the Course of Performance: Studies in the World of Musical Improvisation.</italic></source> <publisher-loc>Chicago, IL</publisher-loc>: <publisher-name>University of Chicago Press</publisher-name>.</citation></ref>
<ref id="B63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>North</surname> <given-names>A. C.</given-names></name> <name><surname>Hargreaves</surname> <given-names>D. J.</given-names></name></person-group> (<year>1999</year>). <article-title>Music and adolescent identity.</article-title> <source><italic>Music Educ. Res.</italic></source> <volume>1</volume> <fpage>75</fpage>&#x2013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1080/1461380990010107</pub-id></citation></ref>
<ref id="B64"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>O&#x2019;Reilly</surname> <given-names>D.</given-names></name> <name><surname>Larsen</surname> <given-names>G.</given-names></name> <name><surname>Kubacki</surname> <given-names>K.</given-names></name></person-group> (<year>2014</year>). <article-title>&#x201C;Marketing live music,&#x201D; in</article-title> <source><italic>Coughing and Clapping: Investigating the Audience Experience</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Burland</surname> <given-names>K.</given-names></name> <name><surname>Pitts</surname> <given-names>S.</given-names></name></person-group> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>) <fpage>7</fpage>&#x2013;<lpage>20</lpage>.</citation></ref>
<ref id="B65"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Papies</surname> <given-names>D.</given-names></name> <name><surname>van Heerde</surname> <given-names>H. J.</given-names></name></person-group> (<year>2017</year>). <article-title>The dynamic interplay between recorded music and live concerts: the role of piracy, unbundling, and artist characteristics.</article-title> <source><italic>J. Mark.</italic></source> <volume>81</volume> <fpage>67</fpage>&#x2013;<lpage>87</lpage>. <pub-id pub-id-type="doi">10.1509/jm.14.0473</pub-id></citation></ref>
<ref id="B66"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Patel</surname> <given-names>A. D.</given-names></name> <name><surname>Iversen</surname> <given-names>J. R.</given-names></name> <name><surname>Bregman</surname> <given-names>M. R.</given-names></name> <name><surname>Schulz</surname> <given-names>I.</given-names></name></person-group> (<year>2009</year>). <article-title>Experimental evidence for synchronization to a musical beat in a nonhuman animal.</article-title> <source><italic>Curr. Biol.</italic></source> <volume>19</volume> <fpage>827</fpage>&#x2013;<lpage>830</lpage>. <pub-id pub-id-type="doi">10.1016/j.cub.2009.03.038</pub-id> <pub-id pub-id-type="pmid">19409790</pub-id></citation></ref>
<ref id="B67"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Phillips-Silver</surname> <given-names>J.</given-names></name> <name><surname>Keller</surname> <given-names>P. E.</given-names></name></person-group> (<year>2012</year>). <article-title>Searching for roots of entrainment and joint action in early musical interactions.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>6</volume>:<issue>26</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2012.00026</pub-id> <pub-id pub-id-type="pmid">22375113</pub-id></citation></ref>
<ref id="B68"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Phillips-Silver</surname> <given-names>J.</given-names></name> <name><surname>Trainor</surname> <given-names>L. J.</given-names></name></person-group> (<year>2008</year>). <article-title>Vestibular influence on auditory metrical interpretation.</article-title> <source><italic>Brain Cogn.</italic></source> <volume>67</volume> <fpage>94</fpage>&#x2013;<lpage>102</lpage>. <pub-id pub-id-type="doi">10.1016/j.bandc.2007.11.007</pub-id> <pub-id pub-id-type="pmid">18234407</pub-id></citation></ref>
<ref id="B69"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rennung</surname> <given-names>M.</given-names></name> <name><surname>Goritz</surname> <given-names>A. S.</given-names></name></person-group> (<year>2016</year>). <article-title>Prosocial consequences of interpersonal synchrony: a Meta-Analysis.</article-title> <source><italic>Z. Psychol.</italic></source> <volume>224</volume> <fpage>168</fpage>&#x2013;<lpage>189</lpage>. <pub-id pub-id-type="doi">10.1027/2151-2604/a000252</pub-id> <pub-id pub-id-type="pmid">28105388</pub-id></citation></ref>
<ref id="B70"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Richardson</surname> <given-names>M. J.</given-names></name> <name><surname>Marsh</surname> <given-names>K. L.</given-names></name> <name><surname>Isenhower</surname> <given-names>R. W.</given-names></name> <name><surname>Goodman</surname> <given-names>J. R. L.</given-names></name> <name><surname>Schmidt</surname> <given-names>R. C.</given-names></name></person-group> (<year>2007</year>). <article-title>Rocking together: dynamics of intentional and unintentional interpersonal coordination.</article-title> <source><italic>Hum. Mov. Sci.</italic></source> <volume>26</volume> <fpage>867</fpage>&#x2013;<lpage>891</lpage>. <pub-id pub-id-type="doi">10.1016/j.humov.2007.07.002</pub-id> <pub-id pub-id-type="pmid">17765345</pub-id></citation></ref>
<ref id="B71"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sakai</surname> <given-names>K.</given-names></name> <name><surname>Hikosaka</surname> <given-names>O.</given-names></name> <name><surname>Miyauchi</surname> <given-names>S.</given-names></name> <name><surname>Takino</surname> <given-names>R.</given-names></name> <name><surname>Tamada</surname> <given-names>T.</given-names></name> <name><surname>Iwata</surname> <given-names>N. K.</given-names></name><etal/></person-group> (<year>1999</year>). <article-title>Neural representation of a rhythm depends on its interval ratio.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>19</volume> <fpage>10074</fpage>&#x2013;<lpage>10081</lpage>. <pub-id pub-id-type="pmid">10559415</pub-id></citation></ref>
<ref id="B72"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schachner</surname> <given-names>A.</given-names></name> <name><surname>Brady</surname> <given-names>T. F.</given-names></name> <name><surname>Pepperberg</surname> <given-names>I. M.</given-names></name> <name><surname>Hauser</surname> <given-names>M. D.</given-names></name></person-group> (<year>2009</year>). <article-title>Spontaneous motor entrainment to music in multiple vocal mimicking species.</article-title> <source><italic>Curr. Biol.</italic></source> <volume>19</volume> <fpage>831</fpage>&#x2013;<lpage>836</lpage>. <pub-id pub-id-type="doi">10.1016/j.cub.2009.03.061</pub-id> <pub-id pub-id-type="pmid">19409786</pub-id></citation></ref>
<ref id="B73"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schellenberg</surname> <given-names>E. G.</given-names></name> <name><surname>Peretz</surname> <given-names>I.</given-names></name> <name><surname>Vieillard</surname> <given-names>S.</given-names></name></person-group> (<year>2008</year>). <article-title>Liking forhappy- and sad-sounding music: effects of exposure.</article-title> <source><italic>Cogn. Emot.</italic></source> <volume>22</volume> <fpage>218</fpage>&#x2013;<lpage>237</lpage>.</citation></ref>
<ref id="B74"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schroeder</surname> <given-names>C. E.</given-names></name> <name><surname>Lakatos</surname> <given-names>P.</given-names></name></person-group> (<year>2009</year>). <article-title>Low-frequency neuronal oscillations as instruments of sensory selection.</article-title> <source><italic>Trends Neurosci.</italic></source> <volume>32</volume> <fpage>9</fpage>&#x2013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1016/j.tins.2008.09.012</pub-id> <pub-id pub-id-type="pmid">19012975</pub-id></citation></ref>
<ref id="B75"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shoda</surname> <given-names>H.</given-names></name> <name><surname>Adachi</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>&#x201C;Effects of the listening context on the audience&#x2019;s perceptions of artistry, expressiveness, and affective qualities in the piano performance,&#x201D; in</article-title> <source><italic>Proceedings of the 12th International Conference on Music Perception and Cognition and the 8th Triennial Conference of the European Society for the Cognitive Sciences of Music</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Cambouropoulos</surname> <given-names>E.</given-names></name> <name><surname>Tsougras</surname> <given-names>C.</given-names></name> <name><surname>Mavromatis</surname> <given-names>P.</given-names></name> <name><surname>Pastiadis</surname> <given-names>K.</given-names></name></person-group> (<publisher-loc>Thessaloniki</publisher-loc>: <publisher-name>Aristotle University of Thessaloniki</publisher-name>) <fpage>925</fpage>&#x2013;<lpage>929</lpage>.</citation></ref>
<ref id="B76"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shoda</surname> <given-names>H.</given-names></name> <name><surname>Adachi</surname> <given-names>M.</given-names></name></person-group> (<year>2015</year>). <article-title>Why live recording sounds better: a case study of Schumann &#x2019; s Tr&#x00E4;umerei.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>5</volume>:<issue>1564</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.01564</pub-id> <pub-id pub-id-type="pmid">25620948</pub-id></citation></ref>
<ref id="B77"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shoda</surname> <given-names>H.</given-names></name> <name><surname>Adachi</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Expressivity, affective nuance, and presentation modality in a performer-to-audience communication.</article-title> <source><italic>Psychomusicol. Music Mind Brain</italic></source> <volume>26</volume> <fpage>167</fpage>&#x2013;<lpage>178</lpage>. <pub-id pub-id-type="doi">10.1037/pmu0000141</pub-id></citation></ref>
<ref id="B78"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shoda</surname> <given-names>H.</given-names></name> <name><surname>Adachi</surname> <given-names>M.</given-names></name> <name><surname>Umeda</surname> <given-names>T.</given-names></name></person-group> (<year>2016</year>). <article-title>How live performance moves the human heart.</article-title> <source><italic>PLoS One</italic></source> <volume>11</volume>:<issue>e0154322</issue>. <pub-id pub-id-type="doi">10.1371/journal.pone.0154322</pub-id> <pub-id pub-id-type="pmid">27104377</pub-id></citation></ref>
<ref id="B79"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Silverberg</surname> <given-names>J. L.</given-names></name> <name><surname>Bierbaum</surname> <given-names>M.</given-names></name> <name><surname>Sethna</surname> <given-names>J. P.</given-names></name> <name><surname>Cohen</surname> <given-names>I.</given-names></name></person-group> (<year>2013</year>). <article-title>Collective motion of humans in mosh and circle pits at heavy metal concerts.</article-title> <source><italic>Phys. Rev. Lett.</italic></source> <volume>110</volume> <fpage>1</fpage>&#x2013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1103/PhysRevLett.110.228701</pub-id> <pub-id pub-id-type="pmid">23767754</pub-id></citation></ref>
<ref id="B80"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stevens</surname> <given-names>C.</given-names></name> <name><surname>Dean</surname> <given-names>R.</given-names></name> <name><surname>Vincs</surname> <given-names>K.</given-names></name> <name><surname>Schubert</surname> <given-names>E.</given-names></name></person-group> (<year>2014</year>). <article-title>&#x201C;In the heat of the moment: audience real-time response to music and dance performance,&#x201D; in</article-title> <source><italic>Coughing and Clapping: Investigating the Audience Experience</italic></source> <role>eds</role> <person-group person-group-type="editor"><name><surname>Burland</surname> <given-names>K.</given-names></name> <name><surname>Pitts</surname> <given-names>S.</given-names></name></person-group> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>Routledge</publisher-name>) <fpage>69</fpage>&#x2013;<lpage>88</lpage>.</citation></ref>
<ref id="B81"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stevens</surname> <given-names>C. J.</given-names></name> <name><surname>Schubert</surname> <given-names>E.</given-names></name> <name><surname>Morris</surname> <given-names>R. H.</given-names></name> <name><surname>Frear</surname> <given-names>M.</given-names></name> <name><surname>Chen</surname> <given-names>J.</given-names></name> <name><surname>Healey</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2009</year>). <article-title>Cognition and the temporal arts: investigating audience response to dance using PDAs that record continuous data during live performance.</article-title> <source><italic>Int. J. Hum. Comput. Stud.</italic></source> <volume>67</volume> <fpage>800</fpage>&#x2013;<lpage>813</lpage>. <pub-id pub-id-type="doi">10.1016/J.IJHCS.2009.06.001</pub-id></citation></ref>
<ref id="B82"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stupacher</surname> <given-names>J.</given-names></name> <name><surname>Hove</surname> <given-names>M. J.</given-names></name> <name><surname>Novembre</surname> <given-names>G.</given-names></name> <name><surname>Sch&#x00FC;tz-Bosbach</surname> <given-names>S.</given-names></name> <name><surname>Keller</surname> <given-names>P. E.</given-names></name></person-group> (<year>2013</year>). <article-title>Musical groove modulates motor cortex excitability: a TMS investigation.</article-title> <source><italic>Brain Cogn.</italic></source> <volume>82</volume> <fpage>127</fpage>&#x2013;<lpage>136</lpage>. <pub-id pub-id-type="doi">10.1016/j.bandc.2013.03.003</pub-id> <pub-id pub-id-type="pmid">23660433</pub-id></citation></ref>
<ref id="B83"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stupacher</surname> <given-names>J.</given-names></name> <name><surname>Maes</surname> <given-names>P.-J.</given-names></name> <name><surname>Witte</surname> <given-names>M.</given-names></name> <name><surname>Wood</surname> <given-names>G.</given-names></name></person-group> (<year>2017</year>). <article-title>Music strengthens prosocial effects of interpersonal synchronization &#x2013; If you move in time with the beat.</article-title> <source><italic>J. Exp. Soc. Psychol.</italic></source> <volume>72</volume> <fpage>39</fpage>&#x2013;<lpage>44</lpage>. <pub-id pub-id-type="doi">10.1016/J.JESP.2017.04.007</pub-id></citation></ref>
<ref id="B84"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Theodorou</surname> <given-names>L.</given-names></name> <name><surname>Healey</surname> <given-names>P. G. T.</given-names></name> <name><surname>Smeraldi</surname> <given-names>F.</given-names></name></person-group> (<year>2016</year>). <article-title>&#x201C;Exploring audience behaviour during contemporary dance performances,&#x201D; in</article-title> <source><italic>Proceedings of the 3rd International Symposium on Movement and Computing - MOCO &#x2019;16</italic></source> <publisher-loc>Thessaloniki</publisher-loc> <fpage>1</fpage>&#x2013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1145/2948910.2948928</pub-id></citation></ref>
<ref id="B85"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Toiviainen</surname> <given-names>P.</given-names></name> <name><surname>Luck</surname> <given-names>G.</given-names></name> <name><surname>Thompson</surname> <given-names>M. R.</given-names></name></person-group> (<year>2010</year>). <article-title>Embodied meter: hierarchical eigenmodes in music-induced movement.</article-title> <source><italic>Music Percept.</italic></source> <volume>28</volume> <fpage>59</fpage>&#x2013;<lpage>70</lpage>. <pub-id pub-id-type="doi">10.1525/mp.2010.28.1.59</pub-id></citation></ref>
<ref id="B86"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Trainor</surname> <given-names>L. J.</given-names></name> <name><surname>Cirelli</surname> <given-names>L. K.</given-names></name></person-group> (<year>2015</year>). <article-title>Rhythm and interpersonal synchrony in early social development.</article-title> <source><italic>Ann. N. Y. Acad. Sci.</italic></source> <volume>1337</volume> <fpage>45</fpage>&#x2013;<lpage>52</lpage>. <pub-id pub-id-type="doi">10.1111/nyas.12649</pub-id> <pub-id pub-id-type="pmid">25773616</pub-id></citation></ref>
<ref id="B87"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Trainor</surname> <given-names>L. J.</given-names></name> <name><surname>Gao</surname> <given-names>X.</given-names></name> <name><surname>Lei</surname> <given-names>J.</given-names></name> <name><surname>Lehtovaara</surname> <given-names>K.</given-names></name> <name><surname>Harris</surname> <given-names>L. R.</given-names></name></person-group> (<year>2009</year>). <article-title>The primal role of the vestibular system in determining musical rhythm.</article-title> <source><italic>Cortex</italic></source> <volume>45</volume> <fpage>35</fpage>&#x2013;<lpage>43</lpage>. <pub-id pub-id-type="doi">10.1016/j.cortex.2007.10.014</pub-id> <pub-id pub-id-type="pmid">19054504</pub-id></citation></ref>
<ref id="B88"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Valdesolo</surname> <given-names>P.</given-names></name> <name><surname>DeSteno</surname> <given-names>D.</given-names></name></person-group> (<year>2011</year>). <article-title>Synchrony and the social tuning of compassion.</article-title> <source><italic>Emotion</italic></source> <volume>11</volume> <fpage>262</fpage>&#x2013;<lpage>266</lpage>. <pub-id pub-id-type="doi">10.1037/a0021302</pub-id> <pub-id pub-id-type="pmid">21500895</pub-id></citation></ref>
<ref id="B89"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Valdesolo</surname> <given-names>P.</given-names></name> <name><surname>Ouyang</surname> <given-names>J.</given-names></name> <name><surname>DeSteno</surname> <given-names>D.</given-names></name></person-group> (<year>2010</year>). <article-title>The rhythm of joint action: synchrony promotes cooperative ability.</article-title> <source><italic>J. Exp. Soc. Psychol.</italic></source> <volume>46</volume> <fpage>693</fpage>&#x2013;<lpage>695</lpage>.</citation></ref>
<ref id="B90"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>van den Bosch</surname> <given-names>I.</given-names></name> <name><surname>Salimpoor</surname> <given-names>W. N.</given-names></name> <name><surname>Zatorre</surname> <given-names>R. J.</given-names></name></person-group> (<year>2013</year>). <article-title>Familiarity mediates the relationship between emotional arousal and pleasure during music listening.</article-title> <source><italic>Front. Hum. Neuroscieurosci.</italic></source> <volume>7</volume>:<issue>534</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2013.00534</pub-id> <pub-id pub-id-type="pmid">24046738</pub-id></citation></ref>
<ref id="B91"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>van Ulzen</surname> <given-names>N. R.</given-names></name> <name><surname>Lamoth</surname> <given-names>C. J. C.</given-names></name> <name><surname>Daffertshofer</surname> <given-names>A.</given-names></name> <name><surname>Semin</surname> <given-names>G. R.</given-names></name> <name><surname>Beek</surname> <given-names>P. J.</given-names></name></person-group> (<year>2008</year>). <article-title>Characteristics of instructed and uninstructed interpersonal coordination while walking side-by-side.</article-title> <source><italic>Neurosci. Lett.</italic></source> <volume>432</volume> <fpage>88</fpage>&#x2013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1016/j.neulet.2007.11.070</pub-id> <pub-id pub-id-type="pmid">18242846</pub-id></citation></ref>
<ref id="B92"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wiltermuth</surname> <given-names>S. S.</given-names></name> <name><surname>Heath</surname> <given-names>C.</given-names></name></person-group> (<year>2009</year>). <article-title>Synchrony and cooperation.</article-title> <source><italic>Psychol. Sci.</italic></source> <volume>20</volume> <fpage>1</fpage>&#x2013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1111/j.1467-9280.2008.02253.x</pub-id> <pub-id pub-id-type="pmid">19152536</pub-id></citation></ref>
<ref id="B93"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Witek</surname> <given-names>M. A. G.</given-names></name> <name><surname>Clarke</surname> <given-names>E. F.</given-names></name> <name><surname>Wallentin</surname> <given-names>M.</given-names></name> <name><surname>Kringelbach</surname> <given-names>M. L.</given-names></name> <name><surname>Vuust</surname> <given-names>P.</given-names></name></person-group> (<year>2014</year>). <article-title>Syncopation, body-movement and pleasure in groove music.</article-title> <source><italic>PLoS One</italic></source> <volume>9</volume>:<issue>e94446</issue>. <pub-id pub-id-type="doi">10.1371/journal.pone.0094446</pub-id> <pub-id pub-id-type="pmid">24740381</pub-id></citation></ref>
<ref id="B94"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Witkower</surname> <given-names>Z.</given-names></name> <name><surname>Tracy</surname> <given-names>J. L.</given-names></name></person-group> (<year>2018</year>). <article-title>Bodily communication of emotion: evidence for extrafacial behavioral expressions and available coding systems.</article-title> <source><italic>Emot. Rev</italic></source> <pub-id pub-id-type="doi">10.1177/1754073917749880</pub-id> <comment>[Epub ahead of print]</comment>.</citation></ref>
<ref id="B95"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Woolhouse</surname> <given-names>M. H.</given-names></name> <name><surname>Tidhar</surname> <given-names>D.</given-names></name> <name><surname>Cross</surname> <given-names>I.</given-names></name></person-group> (<year>2016</year>). <article-title>Effects on inter-personal memory of dancing in time with others.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>7</volume>:<issue>167</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2016.00167</pub-id> <pub-id pub-id-type="pmid">26941668</pub-id></citation></ref>
<ref id="B96"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yoshie</surname> <given-names>M.</given-names></name> <name><surname>Nagai</surname> <given-names>Y.</given-names></name> <name><surname>Critchley</surname> <given-names>H. D.</given-names></name> <name><surname>Harrison</surname> <given-names>N. A.</given-names></name></person-group> (<year>2016</year>). <article-title>Why I tense up when you watch me: inferior parietal cortex mediates an audience&#x2019;s influence on motor performance.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>6</volume> <fpage>1</fpage>&#x2013;<lpage>11</lpage>. <pub-id pub-id-type="doi">10.1038/srep19305</pub-id> <pub-id pub-id-type="pmid">26787326</pub-id></citation></ref>
<ref id="B97"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zajonc</surname> <given-names>R. B.</given-names></name></person-group> (<year>1965</year>). <article-title>Social facilitation.</article-title> <source><italic>Science</italic></source> <volume>149</volume> <fpage>269</fpage>&#x2013;<lpage>274</lpage>. <pub-id pub-id-type="doi">10.1126/science.149.3681.269</pub-id></citation></ref>
<ref id="B98"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zatorre</surname> <given-names>R. J.</given-names></name> <name><surname>Chen</surname> <given-names>J. L.</given-names></name> <name><surname>Penhune</surname> <given-names>V. B.</given-names></name></person-group> (<year>2007</year>). <article-title>When the brain plays music: auditory-motor interactions in music perception and production.</article-title> <source><italic>Nat. Rev. Neurosci.</italic></source> <volume>8</volume> <fpage>547</fpage>&#x2013;<lpage>558</lpage>. <pub-id pub-id-type="doi">10.1038/nrn2152</pub-id> <pub-id pub-id-type="pmid">17585307</pub-id></citation></ref>
<ref id="B99"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zentner</surname> <given-names>M.</given-names></name> <name><surname>Eerola</surname> <given-names>T.</given-names></name></person-group> (<year>2010</year>). <article-title>Rhythmic engagement with music in infancy.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>107</volume> <fpage>5768</fpage>&#x2013;<lpage>5773</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1000121107</pub-id> <pub-id pub-id-type="pmid">20231438</pub-id></citation></ref>
</ref-list>
<fn-group>
<fn id="fn01"><label>1</label><p>Blown Wide Open was first released in 1997. The containing album achieved double platinum sales status (CRIA), and the song itself achieved a peak position of #8 on the Canadian rock charts.</p></fn>
<fn id="fn02"><label>2</label><p><ext-link ext-link-type="uri" xlink:href="http://LIVELab.mcmaster.ca">http://LIVELab.mcmaster.ca</ext-link></p></fn>
<fn id="fn03"><label>3</label><p><ext-link ext-link-type="uri" xlink:href="https://www.youtube.com/watch?v=0h-Js1KtQa4">https://www.youtube.com/watch?v=0h-Js1KtQa4</ext-link></p></fn>
</fn-group>
</back>
</article>