<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neurosci.</journal-id>
<journal-title>Frontiers in Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-453X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnins.2021.608156</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Higher Right Hemisphere Gamma Band Lateralization and Suggestion of a Sensitive Period for Vocal Auditory Emotional Stimuli Recognition in Unilateral Cochlear Implant Children: An EEG Study</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Cartocci</surname> <given-names>Giulia</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/356150/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Giorgi</surname> <given-names>Andrea</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1245983/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Inguscio</surname> <given-names>Bianca M. S.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1092371/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Scorpecci</surname> <given-names>Alessandro</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/714205/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Giannantonio</surname> <given-names>Sara</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1245730/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>De Lucia</surname> <given-names>Antonietta</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1172440/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Garofalo</surname> <given-names>Sabina</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1245963/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Grassia</surname> <given-names>Rosa</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Leone</surname> <given-names>Carlo Antonio</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Longo</surname> <given-names>Patrizia</given-names></name>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Freni</surname> <given-names>Francesco</given-names></name>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1219878/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Malerba</surname> <given-names>Paolo</given-names></name>
<xref ref-type="aff" rid="aff8"><sup>8</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1245919/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Babiloni</surname> <given-names>Fabio</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff9"><sup>9</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2666/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Laboratory of Industrial Neuroscience, Department of Molecular Medicine, Sapienza University of Rome</institution>, <addr-line>Rome</addr-line>, <country>Italy</country></aff>
<aff id="aff2"><sup>2</sup><institution>BrainSigns Srl</institution>, <addr-line>Rome</addr-line>, <country>Italy</country></aff>
<aff id="aff3"><sup>3</sup><institution>Cochlear Implant Unit, Department of Sensory Organs, Sapienza University of Rome</institution>, <addr-line>Rome</addr-line>, <country>Italy</country></aff>
<aff id="aff4"><sup>4</sup><institution>Audiology and Otosurgery Unit, &#x201C;Bambino Ges&#x00F9;&#x201D; Pediatric Hospital and Research Institute</institution>, <addr-line>Rome</addr-line>, <country>Italy</country></aff>
<aff id="aff5"><sup>5</sup><institution>Otology and Cochlear Implant Unit, Regional Referral Centre Children&#x2019;s Hospital &#x201C;Santobono-Pausilipon&#x201D;</institution>, <addr-line>Naples</addr-line>, <country>Italy</country></aff>
<aff id="aff6"><sup>6</sup><institution>Department of Otolaryngology/Head and Neck Surgery, Monaldi Hospital</institution>, <addr-line>Naples</addr-line>, <country>Italy</country></aff>
<aff id="aff7"><sup>7</sup><institution>Department of Otorhinolaryngology, University of Messina</institution>, <addr-line>Messina</addr-line>, <country>Italy</country></aff>
<aff id="aff8"><sup>8</sup><institution>Cochlear Italia Srl</institution>, <addr-line>Bologna</addr-line>, <country>Italy</country></aff>
<aff id="aff9"><sup>9</sup><institution>Department of Computer Science and Technology, Hangzhou Dianzi University, Xiasha Higher Education Zone</institution>, <addr-line>Hangzhou</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Marc Sch&#x00F6;nwiesner, Leipzig University, Germany</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: S&#x00E9;bastien Paquette, McGill University, Canada; Andrej Kral, Hannover Medical School, Germany</p></fn>
<corresp id="c001">&#x002A;Correspondence: Giulia Cartocci, <email>giulia.cartocci@uniroma1.it</email></corresp>
<fn fn-type="other" id="fn004"><p>This article was submitted to Auditory Cognitive Neuroscience, a section of the journal Frontiers in Neuroscience</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>09</day>
<month>03</month>
<year>2021</year>
</pub-date>
<pub-date pub-type="collection">
<year>2021</year>
</pub-date>
<volume>15</volume>
<elocation-id>608156</elocation-id>
<history>
<date date-type="received">
<day>19</day>
<month>09</month>
<year>2020</year>
</date>
<date date-type="accepted">
<day>01</day>
<month>02</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2021 Cartocci, Giorgi, Inguscio, Scorpecci, Giannantonio, De Lucia, Garofalo, Grassia, Leone, Longo, Freni, Malerba and Babiloni.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Cartocci, Giorgi, Inguscio, Scorpecci, Giannantonio, De Lucia, Garofalo, Grassia, Leone, Longo, Freni, Malerba and Babiloni</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>In deaf children, huge emphasis was given to language; however, emotional cues decoding and production appear of pivotal importance for communication capabilities. Concerning neurophysiological correlates of emotional processing, the gamma band activity appears a useful tool adopted for emotion classification and related to the conscious elaboration of emotions. Starting from these considerations, the following items have been investigated: (i) whether emotional auditory stimuli processing differs between normal-hearing (NH) children and children using a cochlear implant (CI), given the non-physiological development of the auditory system in the latter group; (ii) whether the age at CI surgery influences emotion recognition capabilities; and (iii) in light of the right hemisphere hypothesis for emotional processing, whether the CI side influences the processing of emotional cues in unilateral CI (UCI) children. To answer these matters, 9 UCI (9.47 &#x00B1; 2.33 years old) and 10 NH (10.95 &#x00B1; 2.11 years old) children were asked to recognize nonverbal vocalizations belonging to three emotional states: positive (achievement, amusement, contentment, relief), negative (anger, disgust, fear, sadness), and neutral (neutral, surprise). Results showed better performances in NH than UCI children in emotional states recognition. The UCI group showed increased gamma activity lateralization index (LI) (relative higher right hemisphere activity) in comparison to the NH group in response to emotional auditory cues. Moreover, LI gamma values were negatively correlated with the percentage of correct responses in emotion recognition. Such observations could be explained by a deficit in UCI children in engaging the left hemisphere for more demanding emotional task, or alternatively by a higher conscious elaboration in UCI than NH children. 
Additionally, for the UCI group, there was no difference between the CI side and the contralateral side in gamma activity, but a higher gamma activity in the right in comparison to the left hemisphere was found. Therefore, the CI side did not appear to influence the physiologic hemispheric lateralization of emotional processing. Finally, a negative correlation was shown between the age at the CI surgery and the percentage of correct responses in emotion recognition, thus suggesting the occurrence of a sensitive period for CI surgery for best emotion recognition skills development.</p>
</abstract>
<kwd-group>
<kwd>lateralization index</kwd>
<kwd>right hemisphere emotion hypothesis</kwd>
<kwd>deafness</kwd>
<kwd>hearing loss</kwd>
<kwd>brain activity</kwd>
<kwd>length of cochlear implant use</kwd>
<kwd>sensitive period</kwd>
<kwd>auditory age</kwd>
</kwd-group>
<contract-sponsor id="cn001">Cochlear<named-content content-type="fundref-id">10.13039/100008626</named-content></contract-sponsor>
<counts>
<fig-count count="6"/>
<table-count count="2"/>
<equation-count count="1"/>
<ref-count count="72"/>
<page-count count="10"/>
<word-count count="0"/>
</counts>
</article-meta>
</front>
<body>
<sec id="S1">
<title>Introduction</title>
<p>Processing emotional expressions is fundamental for social interactions and communication; in fact, from a very young age, infants are able to detect visual and auditory information in faces and voices of people around them (<xref ref-type="bibr" rid="B28">Grossmann, 2010</xref>). Such capability would develop into the skill to recognize and discriminate emotions, thanks to the contribution of the experience and of the maturation of sensory and perceptual systems. This recognition involves a multisensory effect, evidenced by integration effects of facial and vocal information on cerebral activity, which are apparent both at the level of heteromodal cortical regions of convergence (e.g., bilateral posterior superior temporal sulcus), and at unimodal levels of sensory processing (<xref ref-type="bibr" rid="B8">Campanella and Belin, 2007</xref>; <xref ref-type="bibr" rid="B14">Davies-Thompson et al., 2019</xref>; <xref ref-type="bibr" rid="B71">Young et al., 2020</xref>).</p>
<p>In relation to such cross-sensorial and unisensorial effects, hearing impairment could compromise multisensory integration, in relation to its onset, etiology, and severity, leading the patient to rely only or predominantly on the visual modality in communication, including emotional perception and expression (<xref ref-type="bibr" rid="B42">Mildner and Koska, 2014</xref>). In fact, for 92% of children with cochlear implant (CI), perception was dominated by vision when visual and auditory speech information conflicted (<xref ref-type="bibr" rid="B57">Schorr et al., 2005</xref>). This statement is supported by the results of studies employing the McGurk effect on CI users, which requires the integration of auditory and visual sensory stimuli. For instance, children who received their CI prior to age 30 months accurately identified the incongruent auditory&#x2013;visual stimuli, whereas children who received their CI after 30 months of age did not (<xref ref-type="bibr" rid="B56">Schorr, 2005</xref>). This evidence appears particularly worthy because differently from adults, who mainly prefer visual modality, infants and young children show auditory processing preference, but in children with congenital hearing impairment, such auditory dominance appears absent. Interestingly, in post-lingually deaf CI patients, such greater relying on visual information, indexed by higher speech-reading performances than normal-hearing (NH) individuals, led instead to an increased capacity of integrating visual and distorted speech signals, producing higher visuoauditory performances (<xref ref-type="bibr" rid="B50">Rouger et al., 2007</xref>). 
Furthermore, such evidence in post-lingual deaf patients was also supported by neurophysiological assessments, evidencing a positive correlation between visual activity and auditory speech recovery, suggesting a facilitating role for the visual modality in auditory words&#x2019; perception during communicative situations (<xref ref-type="bibr" rid="B61">Strelnikov et al., 2013</xref>). With respect to general processing preferences, contrary to adults, who prefer the visual modality (<xref ref-type="bibr" rid="B55">Scherer, 2003</xref>), infants and young children exhibit auditory processing preference. Importantly, congenital hearing-impaired children who underwent auditory&#x2013;verbal therapy (a therapy limiting visual cue in order to strengthen the auditory pathway for language learning) reported a behavior similar to NH children, which is an overall auditory preference in response to audiovisual stimuli, although responses did not significantly differ from chance (<xref ref-type="bibr" rid="B72">Zupan and Sussman, 2009</xref>). Contrary to NH individuals, those with hearing impairments do not benefit from the addition of the auditory cues to the visual mode (e.g., <xref ref-type="bibr" rid="B43">Most and Aviner, 2009</xref>). Although the accuracy of emotion perception among children with hearing loss (HL) was lower than that of NH children in auditory, visual, and auditory&#x2013;visual conditions, in prelingually deaf very young children (about 4&#x2013;6 years old), the combined auditory&#x2013;visual mode significantly surpassed the auditory or visual modes alone, as in the NH group, supporting the use of auditory information for emotion perception, probably thanks to intensive rehabilitation (<xref ref-type="bibr" rid="B44">Most and Michaelis, 2012</xref>) and neuroplasticity. 
Such results strongly support the hypothesis of a sensitive period (<xref ref-type="bibr" rid="B36">Kral et al., 2001</xref>; <xref ref-type="bibr" rid="B59">Sharma et al., 2005</xref>; <xref ref-type="bibr" rid="B24">Gilley et al., 2010</xref>) for the establishment of the integration of auditory and visual stimuli.</p>
<p>Thanks to their activity of direct stimulation of the acoustic nerve, converting the auditory stimuli into electrical signals directed to the brain, CIs can successfully restore hearing in profoundly deaf individuals. After intensive rehabilitation, most CI users can reach a good level of speech comprehension. However, the acoustic signal provided by the device is severely degraded, resulting in a poor frequency resolution and deficits in pitch patterns (<xref ref-type="bibr" rid="B22">Gfeller et al., 2007</xref>; <xref ref-type="bibr" rid="B18">Hopyan et al., 2012</xref>) and pitch changes or direction discrimination (<xref ref-type="bibr" rid="B21">Gfeller et al., 2002</xref>) in comparison to NH controls.</p>
<p>Hearing-impaired children go through an early auditory development that is different from that of NH toddlers. This condition would affect their judgment of the emotional content of a stimulus, insofar as the auditory modality resulted as particularly important for the communication of emotions in young children (<xref ref-type="bibr" rid="B6">Baldwin and Moses, 1996</xref>; <xref ref-type="bibr" rid="B3">Akhtar and Gernsbacher, 2008</xref>). The study of such mechanisms appears of great impact since about 600,000 patients world-wide are CI users (<xref ref-type="bibr" rid="B62">The Ear Foundation, 2017</xref>), and many of them are children who were born deaf or lost their hearing within the first few years of life. CI children are a paradigmatic model for the study of emotion recognition skills, as due to the early acquisition of deafness, they learned language through the degraded input of the CI, which greatly affects harmonic pitch perception. This ability is strongly necessary for emotion recognition in voices, and its deficiency could have implications on how child CI users learn to produce vocal emotions (<xref ref-type="bibr" rid="B13">Damm et al., 2019</xref>). However, a very recent study provided evidence that also deaf people can develop skills for emotional vocalizations despite the presence of some differences in comparison to NH adults (<xref ref-type="bibr" rid="B51">Sauter et al., 2019</xref>). Using unilateral CI (UCI) in children, due to non-physiological development of their auditory system and to their asymmetry in receiving auditory inputs, represents a powerful model of investigation of the possible modulation of the hemispheric specialization and of auditory-related emotional skills development in relation to the restored hearing condition. 
Additionally, such participants would provide evidence of the possible modulation of the physiological processes of emotion recognition following the restoration of the auditory capabilities, of which the exact time of beginning is due to the CI surgery time. Children, 7&#x2013;13 years of age, using UCIs perform more poorly than age- and gender-matched controls on the affective speech prosody task but as well as controls in tasks of facial affect perception (<xref ref-type="bibr" rid="B29">Hopyan-Misakyan et al., 2009</xref>), as measured by the DANVA-2 (<xref ref-type="bibr" rid="B46">Nowicki and Duke, 1994</xref>).</p>
<p>One of the few studies that investigated both auditory recognition and vocal production of emotions did not find any consistent advantage for age-matched NH participants in comparison to three prelingually, bilaterally, profoundly deaf children aged 6&#x2013;7 years who received CIs before age 2 years; however, confusion matrices among three of the investigated emotions (anger, happiness, and fear) showed that children with and without hearing impairment may rely on different cues (<xref ref-type="bibr" rid="B42">Mildner and Koska, 2014</xref>).</p>
<p>With respect to emotional skills attainment and in relation to the hemispheric specialization for emotional processing (<xref ref-type="bibr" rid="B19">Gainotti, 2019</xref>), it is interesting to consider that patients enrolled in the present study were UCI users, that is, single-side deaf (SSD) patients. In fact, in the SSD population, it was evidenced that the occurrence of a massive reorganization of aural preference in favor of the hearing ear is greater the greater the precocity of unilateral HL onset, therefore supporting the importance of a short time between the first and second implantation in children (<xref ref-type="bibr" rid="B37">Kral et al., 2013</xref>; <xref ref-type="bibr" rid="B26">Gordon et al., 2015</xref>; <xref ref-type="bibr" rid="B27">Gordon and Papsin, 2019</xref>).</p>
<p>Concerning neural correlates of emotion recognition, gamma band electroencephalogram (EEG) was found to be particularly sensitive for emotion classification (<xref ref-type="bibr" rid="B39">Li and Lu, 2009</xref>; <xref ref-type="bibr" rid="B70">Yang et al., 2020</xref>). Gamma band cerebral activity has been previously linked to facial emotion recognition processes; for instance, a right hemisphere dominance in gamma activity was found during emotional processing of faces in comparison to neutral ones (e.g., <xref ref-type="bibr" rid="B5">Balconi and Lucchiari, 2008</xref>). Such evidence is in accordance with the right hemisphere hypothesis for emotion processing, which, starting from observations on patients with single-hemisphere lesions, states the dominance of the right hemisphere for every kind of emotional response (<xref ref-type="bibr" rid="B19">Gainotti, 2019</xref>). With specific regard to emotional prosody processing and brain activity lateralization, Kotz and colleagues hypothesized that (i) differentially lateralized subprocesses underlie emotional prosody processing and (ii) the lateralization of emotional prosody can be modulated by methodological factors (<xref ref-type="bibr" rid="B34">Kotz et al., 2006</xref>). Furthermore, concerning verbal stimuli, in adult CI users, gamma band&#x2013;induced activity was found to be higher in NH than in CI users, irrespectively of the valence of the emotions investigated (<xref ref-type="bibr" rid="B1">Agrawal et al., 2013</xref>).</p>
<p>On the base of the previous issues, the following experimental questions have been approached in a population of NH and UCI children: (i) Given the non-physiological development of the auditory system in deaf children who underwent hearing restoration through CI use, are the emotional auditory stimuli processed in a similar way than NH children? (ii) Is the auditory age, meant as the age at CI surgery, crucial in the capacity of recognizing emotions? (iii) In light of the evidence that the right hemisphere has a unique contribution in emotional processing &#x2013; summarized in the right hemisphere emotion hypothesis &#x2013; does the side of the CI influence the processing of emotional cues in UCI children, or is the &#x201C;physiological right lateralization&#x201D; respected?</p>
</sec>
<sec id="S2" sec-type="materials|methods">
<title>Materials and Methods</title>
<sec id="S2.SS1">
<title>Participants</title>
<p>For the present study, 10 NH (6 female, 4 male; 10.95 &#x00B1; 2.11 years old) and 9 UCI user (UCI; 5 female, 4 male; 9.47 &#x00B1; 2.33 years old) children were enrolled. Six children had their CI in their right ear and three in their left ear; at the moment of the test, none of them wore any hearing aid in their contralateral ear. All participants were right-handed except for two children: one belonging to the NH and one to the UCI group. Further clinical details of the UCI group are reported in <xref ref-type="table" rid="T1">Table 1</xref>.</p>
<table-wrap position="float" id="T1">
<label>TABLE 1</label>
<caption><p>Demographics concerning the UCI group, in particular etiology of deafness, its onset, and duration of deafness before CI surgery.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Participants</td>
<td valign="top" align="center">Age (years)</td>
<td valign="top" align="center">Etiology</td>
<td valign="top" align="center">Onset of deafness</td>
<td valign="top" align="center">Period of Deafness (years)</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">PI</td>
<td valign="top" align="center">11,39</td>
<td valign="top" align="center">Unknown</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">1,38</td>
</tr>
<tr>
<td valign="top" align="left">P2</td>
<td valign="top" align="center">12,04</td>
<td valign="top" align="center">Unknown</td>
<td valign="top" align="center">3 years old</td>
<td valign="top" align="center">5,91</td>
</tr>
<tr>
<td valign="top" align="left">P3</td>
<td valign="top" align="center">11,66</td>
<td valign="top" align="center">Unknown</td>
<td valign="top" align="center">4 years old</td>
<td valign="top" align="center">2,25</td>
</tr>
<tr>
<td valign="top" align="left">P4</td>
<td valign="top" align="center">10,22</td>
<td valign="top" align="center">Homozygous mutation of the connexin-26 gene</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">1,11</td>
</tr>
<tr>
<td valign="top" align="left">P5</td>
<td valign="top" align="center">7,08</td>
<td valign="top" align="center">Congenital CMV infection</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">3,82</td>
</tr>
<tr>
<td valign="top" align="left">P6</td>
<td valign="top" align="center">9,99</td>
<td valign="top" align="center">Homozygous mutation of the connexin-26 gene</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">2,93</td>
</tr>
<tr>
<td valign="top" align="left">P7</td>
<td valign="top" align="center">9,24</td>
<td valign="top" align="center">Homozygous mutation of the connexin-26 gene</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">8,16</td>
</tr>
<tr>
<td valign="top" align="left">P8</td>
<td valign="top" align="center">12,57</td>
<td valign="top" align="center">Unknown</td>
<td valign="top" align="center">3,5 years old</td>
<td valign="top" align="center">6,41</td>
</tr>
<tr>
<td valign="top" align="left">P9</td>
<td valign="top" align="center">14,37</td>
<td valign="top" align="center">Unknown</td>
<td valign="top" align="center">Birth</td>
<td valign="top" align="center">13,18</td>
</tr>
</tbody>
</table></table-wrap>
</sec>
<sec id="S2.SS2">
<title>Protocol</title>
<p>The task consisted of the recognition of nonverbal vocalizations belonging to a database previously validated and employed in several studies (<xref ref-type="bibr" rid="B52">Sauter et al., 2006</xref>, <xref ref-type="bibr" rid="B53">2010</xref>, <xref ref-type="bibr" rid="B54">2013</xref>) and grouped into three emotional states: positive (achievement, amusement, contentment, relief), negative (anger, disgust, fear, sadness), and neutral (neutral, surprise), which participants were asked to match with the corresponding emotional picture (<xref ref-type="fig" rid="F1">Figure 1</xref>). For each emotion, six different audio stimuli were reproduced, whereas there was a single corresponding emotional picture for each emotion. The emotional audio stimuli had a mean duration of 1,354.25 &#x00B1; 223.39 ms and were delivered at 65 dB HL (<xref ref-type="bibr" rid="B10">Cartocci et al., 2015</xref>, <xref ref-type="bibr" rid="B9">2018</xref>; <xref ref-type="bibr" rid="B41">Marsella et al., 2017</xref>; <xref ref-type="bibr" rid="B49">Piccioni et al., 2018</xref>) through two loudspeakers placed in front of and behind the participant at the distance of 1 m each, to meet CIs&#x2019; best requirements for their use. Participants underwent training with the kind of emotional stimuli employed in the study and a familiarization with the experimental protocol. Once the researcher verified the comprehension of the emotional stimuli and the task by the participant, he/she was asked to carefully listen to the emotional audio and then to identify the emotion reproduced by the stimulus pressing one out of five buttons on a customized keyboard, corresponding to the target emotional picture. For instance, the participant heard a laugh, and he/she had to identify the corresponding picture, a smiling young lady, out of five options. There was no time limit set for such identification and matching with the target emotion. 
Each picture representing the target emotion was placed at least once (and maximum twice) in each of the five positions on the screen. The number of five pictures among which the participant had to identify the target stimulus was chosen in accordance with <xref ref-type="bibr" rid="B47">Orsini et al. (1987)</xref>, who found for the range of age of the enrolled participants a digit span of more than 4.5 items for both males and females. Stimuli were delivered through E-prime software, in a pseudorandomized order so that it was not possible that two stimuli belonging to the same emotion were consecutive.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>Scheme of the experimental protocol.</p></caption>
<graphic xlink:href="fnins-15-608156-g001.tif"/>
</fig>
<p>The study was carefully explained to all participants and to their parents, who signed an informed consent to the participation. The study was approved by the Bambino Ges&#x00F9; Pediatric Hospital Ethic Committee, protocol 705/FS, and was conducted according to the principles outlined in the Declaration of Helsinki of 1975, as revised in 2000.</p>
</sec>
<sec id="S2.SS3">
<title>EEG</title>
<p>A digital EEG system (BE plus EBNeuro, Italy) was used to record 16 EEG channels (Fpz, Fz, F3, F4, F7, F8, T7, T8, P3, P4, P7, P8, O1, O2) according to the international 10/20 system, with a sampling frequency of 256 Hz. The impedances were maintained below 10 k&#x03A9;, and a 50-Hz notch filter was applied to remove the power interference. A ground electrode was placed on the forehead and reference electrodes on earlobes. The EEG signal was initially bandpass filtered with a fifth-order Butterworth filter (high-pass filter: cutoff frequency fc = 1 Hz; low-pass filter: cutoff frequency fc = 40 Hz). Because we could not apply independent component analysis because of the low number of EEG channels (i.e., 16), we used a regression-based method to identify and correct eye-blinks artifacts. In particular, the Fpz channel was used to identify and remove eye-blink artifacts by the REBLINCA algorithm (<xref ref-type="bibr" rid="B17">Di Flumeri et al., 2016</xref>). This method allows the EEG signal to be corrected without losing data. For other sources of artifacts (e.g., environmental noise, user movements, etc.), specific procedures of the EEGLAB toolbox were employed (<xref ref-type="bibr" rid="B15">Delorme and Makeig, 2004</xref>). In particular, the EEG dataset was first segmented into epochs of 2 s through moving windows shifted by 0.125 s. This windowing was chosen with the compromise of having a high number of observations, in comparison with the number of variables, and in order to respect the condition of stationarity of the EEG signal. This is in fact a necessary assumption in order to proceed with the spectral analysis of the signal. 
Successively, three criteria were applied to those EEG epochs (<xref ref-type="bibr" rid="B4">Aric&#x00F2; et al., 2017</xref>; <xref ref-type="bibr" rid="B7">Borghini et al., 2017</xref>): (i) threshold criterion (amplitudes exceeding &#x00B1; 100 &#x03BC;V); (ii) trend criterion (slope higher than 10 &#x03BC;V/s); and (iii) sample-to-sample criterion (sample-to-sample amplitude difference &#x003E;25 &#x03BC;V).</p>
<p>All EEG epochs marked as &#x201C;artifact&#x201D; were removed in order to have a clean EEG signal. In order to accurately define EEG bands of interest, for each participant the individual alpha frequency (IAF) was computed on a closed-eyes segment recorded prior to the experimental task. Thus, the EEG was filtered in the following frequency bands: theta [IAF &#x2212; 6 &#x00F7; IAF &#x2212; 2 Hz], alpha [IAF &#x2212; 2 &#x00F7; IAF + 2 Hz], beta [IAF + 2 &#x00F7; IAF + 16 Hz], and gamma [IAF + 16 &#x00F7; IAF + 25 Hz] (<xref ref-type="bibr" rid="B33">Klimesch, 1999</xref>). EEG recordings were segmented into trials, corresponding to audio stimulus listening and target picture matching. The power spectrum density was calculated in correspondence of the different conditions with a frequency resolution of 0.5 Hz. Trials were normalized by subtracting the open-eyes activity recorded before the beginning of the experimental task.</p>
</sec>
<sec id="S2.SS4">
<title>Lateralization Index</title>
<p>The lateralization index (LI) was calculated in order to assess the relative asymmetry between the two cerebral hemispheres&#x2019; activity during the task execution (audio stimuli perception and target visual stimuli matching), as the right hemisphere theory for emotion predicts a relative higher right activation during emotional stimuli processing.</p>
<p>The LI was calculated on the basis of the formula previously adopted by <xref ref-type="bibr" rid="B64">Vanvooren et al. (2015)</xref>:</p>
<disp-formula id="S2.Ex1">
<mml:math id="M1">
<mml:mrow>
<mml:mrow>
<mml:mtext mathvariant="bold">L</mml:mtext>
<mml:mo>&#x2062;</mml:mo>
<mml:mi>I</mml:mi>
</mml:mrow>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mtext mathvariant="bold">R</mml:mtext>
<mml:mo>-</mml:mo>
<mml:mtext mathvariant="bold">L</mml:mtext>
</mml:mrow>
<mml:mrow>
<mml:mtext mathvariant="bold">R</mml:mtext>
<mml:mo>+</mml:mo>
<mml:mtext mathvariant="bold">L</mml:mtext>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
</disp-formula>
<p>where <italic>R</italic> stands for right hemisphere, and <italic>L</italic> for left hemisphere. The LI ranges from +1, for cortical activity entirely asymmetrical to the right hemisphere, to zero for symmetrical cortical activity, and &#x2212;1 for cortical activity entirely asymmetrical to the left hemisphere. For the right hemisphere activity calculation, the estimation from the following electrodes was averaged: F4, F8, T8, P4, P8, O2, whereas for the left hemisphere, it was averaged from the following ones: F3, F7, T7, P3, P7, O1. The LI was already employed on hearing-impaired children, in particular, SSD children, finding an asymmetry in cortical activity during the execution of a word in noise recognition task influenced by the direction of the background noise in SSD but not in NH children (<xref ref-type="bibr" rid="B11">Cartocci et al., 2019</xref>).</p>
</sec>
<sec id="S2.SS5">
<title>Statistical Analysis</title>
<p>Both the percentage of correct responses and LI data were compared between the NH and UCI groups through analysis of variance (ANOVA) with two factors: GROUP (2 levels: NH and UCI) and EMOTIONAL STATE (3 levels: positive, negative, and neutral). A simple regression analysis was performed for investigating the relation between (i) the percentage of correct responses and the LI values, (ii) between the percentage of correct responses and the age at the test execution, and (iii) between the percentage of correct responses and the age at CI surgery.</p>
</sec>
</sec>
<sec id="S3">
<title>Results</title>
<p>Behavioral results evidenced a higher percentage of correct responses provided by NH children in comparison to UCI children (<italic>F</italic> = 18.898, <italic>p</italic> &#x003C; 0.001, partial &#x03B7;<sup>2</sup> = 0.270) (<xref ref-type="table" rid="T2">Table 2</xref>), but an effect of the emotional state was not seen (<italic>F</italic> = 1.890, <italic>p</italic> = 0.161, partial &#x03B7;<sup>2</sup> = 0.069), although for both groups the neutral cues were the most difficult to recognize. Moreover, no interaction between group and emotional state (<italic>F</italic> = 0.032, <italic>p</italic> = 0.968, partial &#x03B7;<sup>2</sup> = 0.001) was observed (<xref ref-type="fig" rid="F2">Figure 2</xref>).</p>
<table-wrap position="float" id="T2">
<label>TABLE 2</label>
<caption><p>Mean percentages of correct responses &#x00B1; standard deviation for each group (UCI and NH) and for each emotional state.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Group</td>
<td valign="top" align="center">Negative</td>
<td valign="top" align="center">Neutral</td>
<td valign="top" align="center">Positive</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">NH</td>
<td valign="top" align="center">86.58% &#x00B1; 9.82</td>
<td valign="top" align="center">78.33% &#x00B1; 18.92</td>
<td valign="top" align="center">88.33% &#x00B1; 10.17</td>
</tr>
<tr>
<td valign="top" align="left">UCI</td>
<td valign="top" align="center">65.05% &#x00B1; 19.37</td>
<td valign="top" align="center">58.24% &#x00B1; 22.17</td>
<td valign="top" align="center">69.67% &#x00B1; 21.02</td>
</tr>
</tbody>
</table></table-wrap>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Percentage of correct responses reported by NH and UCI children. Bars describe means, and error bars describe standard deviations.</p></caption>
<graphic xlink:href="fnins-15-608156-g002.tif"/>
</fig>
<p>ANOVA results showed higher LI values, indicating a higher activity in gamma band in the right in comparison to the left hemisphere, in UCI in comparison to NH children (<italic>F</italic> = 58.656, <italic>p</italic> &#x003C; 0.001, partial &#x03B7;<sup>2</sup> = 0.535) (<xref ref-type="fig" rid="F3">Figure 3</xref>), irrespective of the emotional state (negative, neutral, and positive) (<italic>F</italic> = 1.686, <italic>p</italic> = 0.195, partial &#x03B7;<sup>2</sup> = 0.062). Additionally, no interaction between group and emotional state was found (<italic>F</italic> = 1.121, <italic>p</italic> = 0.333, partial &#x03B7;<sup>2</sup> = 0.042).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>Gamma band activity LI in NH and UCI children. Bars describe means, and error bars describe standard deviations.</p></caption>
<graphic xlink:href="fnins-15-608156-g003.tif"/>
</fig>
<p>A negative correlation was observed between LI gamma values and the percentage of correct responses (<italic>F</italic> = 11.801, <italic>p</italic> = 0.001, <italic>r</italic> = &#x2212;0.420, partial &#x03B7;<sup>2</sup> = 0.177) (<xref ref-type="fig" rid="F4">Figure 4</xref>).</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption><p>Correlation between the lateralization (LI) gamma values and percentage of correct responses for both the UCI and NH groups. Dark dots represent NH values, and green dots represent UCI values.</p></caption>
<graphic xlink:href="fnins-15-608156-g004.tif"/>
</fig>
<p>Additionally, for the UCI group, no difference between the CI side and the deaf contralateral side in the gamma activity was shown (<italic>F</italic> = 0.598, <italic>p</italic> = 0.212, partial &#x03B7;<sup>2</sup> = 0.032) (<xref ref-type="fig" rid="F5">Figure 5A</xref>), but a higher gamma activity in the right in comparison to the left hemisphere was found (<italic>F</italic> = 54.552, <italic>p</italic> &#x003C; 0.001, partial &#x03B7;<sup>2</sup> = 0.532) (<xref ref-type="fig" rid="F5">Figure 5B</xref>).</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption><p>Comparison between gamma activity in the UCI group with respect to the UCI side <bold>(A)</bold> and right or left side <bold>(B)</bold>. Bars describe means, and error bars describe standard deviations.</p></caption>
<graphic xlink:href="fnins-15-608156-g005.tif"/>
</fig>
<p>For the UCI group, no correlation was found between the age of the UCI children at the moment of the experiment and the percentage of correct responses (<italic>F</italic> = 0.052, <italic>p</italic> = 0.821, <italic>r</italic> = 0.046, partial &#x03B7;<sup>2</sup> = 0.002), similarly to the NH children group (<italic>F</italic> = 1.130, <italic>p</italic> = 0.297, <italic>r</italic> = 0.197, partial &#x03B7;<sup>2</sup> = 0.039). Additionally, a negative correlation was shown between the age at the CI surgery and the percentage of correct responses reported by UCI children (<italic>F</italic> = 7.030, <italic>p</italic> = 0.014, <italic>r</italic> = 0.468, partial &#x03B7;<sup>2</sup> = 0.219) (<xref ref-type="fig" rid="F6">Figure 6</xref>). Finally, when calculating the mean of the correct responses for each participant, irrespective of the emotional states, despite the lack of significance (<italic>F</italic> = 3.056, <italic>p</italic> = 0.124, <italic>r</italic> = &#x2212;0.551, partial &#x03B7;<sup>2</sup> = 0.304), a higher percentage of correct responses was highlighted, higher than 70%, only in early implanted children, that is, before 3.5 years of age (<xref ref-type="fig" rid="F6">Figure 6</xref>, black dots).</p>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption><p>Correlation between age at CI surgery and percentage of correct responses in UCI children. Orange dots stand for positive emotional states; blue dots stand for negative emotional states, and gray dots stand for neutral emotional states. Black dots stand for the mean of correct responses for each participant, irrespective of the emotional state. The vertical green lines represent the sensitive period threshold (3.5 and 7 years old) for the central auditory system development (<xref ref-type="bibr" rid="B59">Sharma et al., 2005</xref>).</p></caption>
<graphic xlink:href="fnins-15-608156-g006.tif"/>
</fig>
</sec>
<sec id="S4">
<title>Discussion</title>
<p>According to literature, the lower percentage of correct responses provided by UCI children in comparison to NH children highlights their impairment in vocal emotion recognition skills (<xref ref-type="bibr" rid="B1">Agrawal et al., 2013</xref>; <xref ref-type="bibr" rid="B69">Wiefferink et al., 2013</xref>; <xref ref-type="bibr" rid="B12">Chatterjee et al., 2015</xref>; <xref ref-type="bibr" rid="B30">Jiam et al., 2017</xref>; <xref ref-type="bibr" rid="B2">Ahmed et al., 2018</xref>; <xref ref-type="bibr" rid="B48">Paquette et al., 2018</xref>). This would be strongly related to the preverbal and periverbal deafness acquisition. In fact, in a study employing emotional vocal stimuli in adult CI users, such performance difference was not shown (<xref ref-type="bibr" rid="B16">Deroche et al., 2019</xref>). Furthermore, there are evidences of different strategies implemented by CI and NH listeners for emotional stimuli recognition, more based on pitch range cues in the former and more relying on mean pitch in the latter group (<xref ref-type="bibr" rid="B23">Gilbers et al., 2015</xref>). In addition, such deficit in emotion recognition in UCI children in comparison to NH children appears strictly related to the matter of social interaction and social development (<xref ref-type="bibr" rid="B30">Jiam et al., 2017</xref>); in fact, a correlation between impairments in perception and production of voice emotion was found, like in the case of infant-directed speech, and in 5- to 13-year-old children who used CI (<xref ref-type="bibr" rid="B45">Nakata et al., 2012</xref>). It is interesting to note that a previous study employing vocal child-directed happy and sad speech stimuli reported higher performance in NH in comparison to CI using children; however, the percentage of recognition was higher than the one reported in the present study, probably due to the child-directed characteristic of the stimuli (<xref ref-type="bibr" rid="B65">Volkova et al., 2013</xref>).</p>
<p>Concerning the difference in gamma LI values observed in UCI in comparison to the NH group, it confirmed a difference in gamma band activity previously reported by <xref ref-type="bibr" rid="B1">Agrawal et al. (2013)</xref> in comparison between the same groups, therefore supporting the suitability of the study of gamma rhythms in the investigation of emotional messages conveyed by means of auditory stimuli. However, the previously mentioned study and the present study are not perfectly comparable because of the differences (i) in the sample &#x2013; adults and children, respectively, &#x2013; and therefore plausibly in the etiology of deafness; (ii) in the location of EEG activity acquisition, that is, Cz and multiple electrodes over the two hemispheres, respectively; and (iii) in the kind of emotional stimuli, that is, verbal stimuli pronounced with neutral, happy, and angry prosody in Agrawal and colleagues&#x2019; study, while vocal nonverbal stimuli belonging to 10 emotions grouped into three emotional states in the present study. Moreover, the higher LI values reported for UCI in comparison to NH children would imply a more sustained conscious processing of the stimuli for the NH group in comparison to the UCI group and a higher processing of the emotional face stimuli &#x2013; employed for the matching of the auditory stimuli for the identification of the target emotion &#x2013; by the UCI group (<xref ref-type="bibr" rid="B5">Balconi and Lucchiari, 2008</xref>). In fact, McGurk studies showed a higher relying of UCI children on the visual sensation than on the auditory one in case of uncertainty (<xref ref-type="bibr" rid="B57">Schorr et al., 2005</xref>).</p>
<p>The correlation between higher right lateralization, as indexed by higher LI values, and the percentage of correct responses could be explained by the evidence of higher activation and asymmetry levels in poorer performers in emotion-in-voice recognition tasks than those of more proficient ones (<xref ref-type="bibr" rid="B32">Kislova and Rusalova, 2009</xref>). This possibly also reflects the poorer performance in emotion recognition obtained by UCI children, as well as their higher LI values in comparison to NH children. In fact, it was shown by studies on single hemisphere damage that although the right hemisphere is responsible for low-level discrimination and recognition of affective prosody, in case of higher task demands in terms of associational-cognitive requirements, the left hemisphere is engaged (<xref ref-type="bibr" rid="B63">Tompkins and Flowers, 1985</xref>). Thus, UCI children would present deficits in such engaging of the left hemisphere for more complex emotional processing tasks. This could be explained by the neuroimaging evidence that indeed areas appearing to be primarily involved in emotional prosodic processing, that is, posterior temporal (parietal) brain regions (<xref ref-type="bibr" rid="B34">Kotz et al., 2006</xref>), are the same areas presumably more involved by the neuroplastic changes that occurred after CI surgery (<xref ref-type="bibr" rid="B25">Giraud et al., 2000</xref>; <xref ref-type="bibr" rid="B31">Kang et al., 2004</xref>) and the following hearing sensation restoration.</p>
<p>The negative correlation between age of implantation and percentage of correct responses in emotion recognition is in accordance with previous studies (<xref ref-type="bibr" rid="B40">Mancini et al., 2016</xref>). On the contrary, in the Deroche and colleagues&#x2019; study on adult CI users cited above, any effect of the age at implantation on the emotion recognition was not found, but this would be caused by the post-lingual acquisition of deafness in the majority of the sample (19 over 22 CI users) and by the type of emotions investigated, which is happy, sad, and neutral, whereas in the present study, 10 emotions were employed (<xref ref-type="bibr" rid="B16">Deroche et al., 2019</xref>). Furthermore, in <xref ref-type="bibr" rid="B65">Volkova et al.&#x2019; (2013</xref>) study, employing child-directed emotional speech, performance of the children CI users was positively associated with duration of implant use. Such evidence could be compared to present results, given the almost overlap between age at CI surgery and length of CI use in the enrolled sample. In addition, the trend that better performances were obtained by children implanted before 3.5 years old suggests the influence of a sensitive period, identified through P1 cortical auditory-evoked potential trajectory post-CI development (<xref ref-type="bibr" rid="B60">Sharma et al., 2002</xref>, <xref ref-type="bibr" rid="B59">2005</xref>; <xref ref-type="bibr" rid="B58">Sharma and Dorman, 2006</xref>; <xref ref-type="bibr" rid="B38">Kral and Sharma, 2012</xref>; <xref ref-type="bibr" rid="B35">Kral et al., 2019</xref>) also on emotion recognition skills development. Such phenomenon could be explained by the better auditory&#x2013;visual integration achieved by children implanted before 3.5 years of age as shown by Miller&#x2019;s test of the race model inequality executed by early and late implanted children (<xref ref-type="bibr" rid="B24">Gilley et al., 2010</xref>). 
Such auditory&#x2013;visual integration capability achievement is also witnessed by McGurk effect tests on CI children, showing that 38% of early implanted children &#x2013; before the age of 2.5 years &#x2013; but none of the late implanted children exhibited the bimodal fusion occurring in the McGurk effect, being instead biased toward the visual modality in contrast to the NH children who were biased toward the audio modality (<xref ref-type="bibr" rid="B57">Schorr et al., 2005</xref>). These evidences, with respect to the topic of emotion recognition skills development, are in accord to studies indicating that auditory and visual integration is necessary for the achievement of such capabilities (<xref ref-type="bibr" rid="B8">Campanella and Belin, 2007</xref>). In relation to this matter, there is also the evidence of a delay on facial emotion recognition in preschoolers using CI (and hearing aids) in comparison to NH mates, and interestingly, there was not any correlation between facial emotion recognition and language abilities (<xref ref-type="bibr" rid="B67">Wang et al., 2011</xref>). Differently, another study found a relation between better language skills and higher social competence, both in NH and CI children, although in the latter group, less adequate emotion-regulation strategies and less social competence than NH children were highlighted (<xref ref-type="bibr" rid="B68">Wiefferink et al., 2012</xref>). 
In addition, a study investigating both linguistic (recognition of monosyllabic words and of key words from sentences within background noise; repetition of non-words) and indexical (discrimination of across-gender and within-gender talkers; identification of emotional content from spoken sentences) properties in perceptual analysis of speech in CI children found an association between better performances in such feature recognition and a younger age at implantation (and use of more novel speech processor technology) (<xref ref-type="bibr" rid="B20">Geers et al., 2013</xref>).</p>
<p>Moreover, concerning the emotional communication, a suggestion of deficits also in the imitation of emotional (happy and sad) speech stimuli was found (<xref ref-type="bibr" rid="B66">Wang et al., 2013</xref>). Therefore, two targets of rehabilitation clearly emerge for children with CI, which should be addressed both jointly and separately: language treatment and emotional intervention.</p>
</sec>
<sec id="S5">
<title>Conclusion</title>
<p>In light of the present results, in relation to the experimental questions previously declared, it is possible to conclude that (i) the processing of the emotional stimuli by deaf children using CI appears to be different from NH children, as suggested by the higher relative right hemisphere gamma band activity, possibly explained by the non-physiological development of the auditory system; (ii) on account of the inverse correlation between the age at the CI surgery and the percentage of correct responses, the precocity of performing the CI surgery for the attainment of best emotion recognition skills appears crucial, probably because of neuroplastic changes allowing a better processing and categorization of emotional stimuli; and (iii) the CI side does not appear to influence the processing of emotional stimuli, although interestingly the relative higher gamma band activity appears to be counterproductive in terms of emotion recognition performances; such aspect needs further investigation at the light of the possible particular implications of the right hemisphere hypothesis (<xref ref-type="bibr" rid="B34">Kotz et al., 2006</xref>).</p>
</sec>
<sec id="S6">
<title>Data Availability Statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="S7">
<title>Ethics Statement</title>
<p>The studies involving human participants were reviewed and approved by the Bambino Ges&#x00F9; Pediatric Hospital Ethics Committee. Written informed consent to participate in this study was provided by the participants&#x2019; legal guardian/next of kin.</p>
</sec>
<sec id="S8">
<title>Author Contributions</title>
<p>GC conceived and conducted the study, performed the data analysis, and wrote the manuscript. AG and BI prepared the experimental protocol, conducted the study, and elaborated data. AS, SGi, AD, SGa, RG, CL, PL, and FF enrolled patients and organized experimental sessions. PM provided support for the organization and realization of the study. AS and FB edited the manuscript. FB supervised the entire experiment. All authors read and approved the final version of the article.</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>GC, AG, BI, and FB were employed by BrainSigns Srl. PM was employed by Cochlear Italia Srl. The remaining authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
</body>
<back>
<fn-group>
<fn fn-type="financial-disclosure">
<p><bold>Funding.</bold> This research was funded by Cochlear Srl: IIR-1983.</p>
</fn>
</fn-group>
<ack>
<p>We acknowledge the contribution of participants and their families to the study, who kindly voluntarily granted their availability.</p>
</ack>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Agrawal</surname> <given-names>D.</given-names></name> <name><surname>Thorne</surname> <given-names>J. D.</given-names></name> <name><surname>Viola</surname> <given-names>F. C.</given-names></name> <name><surname>Timm</surname> <given-names>L.</given-names></name> <name><surname>Debener</surname> <given-names>S.</given-names></name> <name><surname>B&#x00FC;chner</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2013</year>). <article-title>Electrophysiological responses to emotional prosody perception in cochlear implant users.</article-title> <source><italic>NeuroImage</italic></source> <volume>2</volume> <fpage>229</fpage>&#x2013;<lpage>238</lpage>. <pub-id pub-id-type="doi">10.1016/j.nicl.2013.01.001</pub-id> <pub-id pub-id-type="pmid">24179776</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ahmed</surname> <given-names>D. G.</given-names></name> <name><surname>Paquette</surname> <given-names>S.</given-names></name> <name><surname>Zeitouni</surname> <given-names>A.</given-names></name> <name><surname>Lehmann</surname> <given-names>A.</given-names></name></person-group> (<year>2018</year>). <article-title>Neural processing of musical and vocal emotions through cochlear implants simulation.</article-title> <source><italic>Clin. EEG Neurosci.</italic></source> <volume>49</volume> <fpage>143</fpage>&#x2013;<lpage>151</lpage>. <pub-id pub-id-type="doi">10.1177/1550059417733386</pub-id> <pub-id pub-id-type="pmid">28958161</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Akhtar</surname> <given-names>N.</given-names></name> <name><surname>Gernsbacher</surname> <given-names>M. A.</given-names></name></person-group> (<year>2008</year>). <article-title>On privileging the role of gaze in infant social cognition.</article-title> <source><italic>Child Dev. Perspect.</italic></source> <volume>2</volume> <fpage>59</fpage>&#x2013;<lpage>65</lpage>. <pub-id pub-id-type="doi">10.1111/j.1750-8606.2008.00044.x</pub-id> <pub-id pub-id-type="pmid">25520748</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aric&#x00F2;</surname> <given-names>P.</given-names></name> <name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Di Flumeri</surname> <given-names>G.</given-names></name> <name><surname>Sciaraffa</surname> <given-names>N.</given-names></name> <name><surname>Colosimo</surname> <given-names>A.</given-names></name> <name><surname>Babiloni</surname> <given-names>F.</given-names></name></person-group> (<year>2017</year>). <article-title>Passive BCI in operational environments: insights, recent advances, and future trends.</article-title> <source><italic>IEEE Trans. Biomed. Eng.</italic></source> <volume>64</volume> <fpage>1431</fpage>&#x2013;<lpage>1436</lpage>. <pub-id pub-id-type="doi">10.1109/TBME.2017.2694856</pub-id> <pub-id pub-id-type="pmid">28436837</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Balconi</surname> <given-names>M.</given-names></name> <name><surname>Lucchiari</surname> <given-names>C.</given-names></name></person-group> (<year>2008</year>). <article-title>Consciousness and arousal effects on emotional face processing as revealed by brain oscillations. a gamma band analysis.</article-title> <source><italic>Int. J. Psychophysiol.</italic></source> <volume>67</volume> <fpage>41</fpage>&#x2013;<lpage>46</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijpsycho.2007.10.002</pub-id> <pub-id pub-id-type="pmid">17997495</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Baldwin</surname> <given-names>D. A.</given-names></name> <name><surname>Moses</surname> <given-names>L. J.</given-names></name></person-group> (<year>1996</year>). <article-title>The ontogeny of social information gathering.</article-title> <source><italic>Child Dev.</italic></source> <volume>67</volume> <fpage>1915</fpage>&#x2013;<lpage>1939</lpage>. <pub-id pub-id-type="doi">10.1111/j.1467-8624.1996.tb01835.x</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Aric&#x00F2;</surname> <given-names>P.</given-names></name> <name><surname>Di Flumeri</surname> <given-names>G.</given-names></name> <name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Colosimo</surname> <given-names>A.</given-names></name> <name><surname>Bonelli</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>EEG-Based cognitive control behaviour assessment: an ecological study with professional air traffic controllers.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>7</volume>:<issue>547</issue>. <pub-id pub-id-type="doi">10.1038/s41598-017-00633-7</pub-id> <pub-id pub-id-type="pmid">28373684</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Campanella</surname> <given-names>S.</given-names></name> <name><surname>Belin</surname> <given-names>P.</given-names></name></person-group> (<year>2007</year>). <article-title>Integrating face and voice in person perception.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>11</volume> <fpage>535</fpage>&#x2013;<lpage>543</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2007.10.001</pub-id> <pub-id pub-id-type="pmid">17997124</pub-id></citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Maglione</surname> <given-names>A. G.</given-names></name> <name><surname>Rossi</surname> <given-names>D.</given-names></name> <name><surname>Modica</surname> <given-names>E.</given-names></name> <name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Malerba</surname> <given-names>P.</given-names></name><etal/></person-group> (<year>2018</year>). &#x201C;<article-title>Alpha and theta EEG variations as indices of listening effort to be implemented in neurofeedback among cochlear implant users</article-title>,&#x201D; in <source><italic>Symbiotic Interaction.</italic> Symbiotic 2017. Lecture Notes in Computer Science</source>, <role>eds</role> <person-group person-group-type="editor"><name><surname>Jaap</surname> <given-names>H.</given-names></name> <name><surname>Spagnolli</surname> <given-names>A.</given-names></name> <name><surname>Blankertz</surname> <given-names>B.</given-names></name> <name><surname>Gamberini</surname> <given-names>L.</given-names></name> <name><surname>Jacucci</surname> <given-names>G.</given-names></name></person-group> (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer International Publishing</publisher-name>), <fpage>30</fpage>&#x2013;<lpage>41</lpage>.</citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Maglione</surname> <given-names>A. G.</given-names></name> <name><surname>Vecchiato</surname> <given-names>G.</given-names></name> <name><surname>Di Flumeri</surname> <given-names>G.</given-names></name> <name><surname>Colosimo</surname> <given-names>A.</given-names></name> <name><surname>Scorpecci</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2015</year>). &#x201C;<article-title>Mental workload estimations in unilateral deafened children</article-title>,&#x201D; in <source><italic>Proceedings of the 2015 37th Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)</italic></source>, <publisher-loc>Milan</publisher-loc>, <fpage>1654</fpage>&#x2013;<lpage>1657</lpage>. <pub-id pub-id-type="doi">10.1109/EMBC.2015.7318693</pub-id> <pub-id pub-id-type="pmid">26736593</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Scorpecci</surname> <given-names>A.</given-names></name> <name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Maglione</surname> <given-names>A.</given-names></name> <name><surname>Inguscio</surname> <given-names>B. M. S.</given-names></name> <name><surname>Giannantonio</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>EEG rhythms lateralization patterns in children with unilateral hearing loss are different from the patterns of normal hearing controls during speech-in-noise listening.</article-title> <source><italic>Hear. Res.</italic></source> <volume>379</volume> <fpage>31</fpage>&#x2013;<lpage>42</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2019.04.011</pub-id> <pub-id pub-id-type="pmid">31042607</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chatterjee</surname> <given-names>M.</given-names></name> <name><surname>Zion</surname> <given-names>D.</given-names></name> <name><surname>Deroche</surname> <given-names>M. L.</given-names></name> <name><surname>Burianek</surname> <given-names>B.</given-names></name> <name><surname>Limb</surname> <given-names>C.</given-names></name> <name><surname>Goren</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2015</year>). <article-title>Voice emotion recognition by cochlear-implanted children and their normally-hearing peers.</article-title> <source><italic>Hear. Res.</italic></source> <volume>322</volume> <fpage>151</fpage>&#x2013;<lpage>162</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2014.10.003</pub-id> <pub-id pub-id-type="pmid">25448167</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Damm</surname> <given-names>S. A.</given-names></name> <name><surname>Sis</surname> <given-names>J. L.</given-names></name> <name><surname>Kulkarni</surname> <given-names>A. M.</given-names></name> <name><surname>Chatterjee</surname> <given-names>M.</given-names></name></person-group> (<year>2019</year>). <article-title>How vocal emotions produced by children with cochlear implants are perceived by their hearing peers.</article-title> <source><italic>J. Speech Lang. Hear. Research</italic></source> <volume>62</volume> <fpage>3728</fpage>&#x2013;<lpage>3740</lpage>. <pub-id pub-id-type="doi">10.1044/2019_JSLHR-S-18-0497</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Davies-Thompson</surname> <given-names>J.</given-names></name> <name><surname>Elli</surname> <given-names>G. V.</given-names></name> <name><surname>Rezk</surname> <given-names>M.</given-names></name> <name><surname>Benetti</surname> <given-names>S.</given-names></name> <name><surname>van Ackeren</surname> <given-names>M.</given-names></name> <name><surname>Collignon</surname> <given-names>O.</given-names></name></person-group> (<year>2019</year>). <article-title>Hierarchical brain network for face and voice integration of emotion expression.</article-title> <source><italic>Cereb. Cortex</italic></source> <volume>29</volume> <fpage>3590</fpage>&#x2013;<lpage>3605</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhy240</pub-id> <pub-id pub-id-type="pmid">30272134</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Delorme</surname> <given-names>A.</given-names></name> <name><surname>Makeig</surname> <given-names>S.</given-names></name></person-group> (<year>2004</year>). <article-title>EEGLAB: an open source toolbox for analysis of single-trial EEG dynamics including independent component analysis.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>134</volume> <fpage>9</fpage>&#x2013;<lpage>21</lpage>. <pub-id pub-id-type="doi">10.1016/j.jneumeth.2003.10.009</pub-id> <pub-id pub-id-type="pmid">15102499</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Deroche</surname> <given-names>M. L. D.</given-names></name> <name><surname>Felezeu</surname> <given-names>M.</given-names></name> <name><surname>Paquette</surname> <given-names>S.</given-names></name> <name><surname>Zeitouni</surname> <given-names>A.</given-names></name> <name><surname>Lehmann</surname> <given-names>A.</given-names></name></person-group> (<year>2019</year>). <article-title>Neurophysiological differences in emotional processing by cochlear implant users, extending beyond the realm of speech.</article-title> <source><italic>Ear Hear.</italic></source> <volume>40</volume> <fpage>1197</fpage>&#x2013;<lpage>1209</lpage>. <pub-id pub-id-type="doi">10.1097/AUD.0000000000000701</pub-id> <pub-id pub-id-type="pmid">30762600</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Di Flumeri</surname> <given-names>G.</given-names></name> <name><surname>Aric&#x00F3;</surname> <given-names>P.</given-names></name> <name><surname>Borghini</surname> <given-names>G.</given-names></name> <name><surname>Colosimo</surname> <given-names>A.</given-names></name> <name><surname>Babiloni</surname> <given-names>F.</given-names></name></person-group> (<year>2016</year>). &#x201C;<article-title>A new regression-based method for the eye blinks artifacts correction in the EEG signal, without using any EOG channel</article-title>,&#x201D; in <source><italic>Proceedings of the 2016 38th Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)</italic></source>, <publisher-loc>Orlando, FL</publisher-loc>, <fpage>3187</fpage>&#x2013;<lpage>3190</lpage>. <pub-id pub-id-type="doi">10.1109/EMBC.2016.7591406</pub-id> <pub-id pub-id-type="pmid">28268985</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>eHopyan</surname> <given-names>T.</given-names></name> <name><surname>ePeretz</surname> <given-names>I.</given-names></name> <name><surname>Chan</surname> <given-names>L. P.</given-names></name> <name><surname>Papsin</surname> <given-names>B. C.</given-names></name> <name><surname>Gordon</surname> <given-names>K. A.</given-names></name></person-group> (<year>2012</year>). <article-title>Children using cochlear implants capitalize on acoustical hearing for music perception.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>3</volume>:<issue>425</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2012.00425</pub-id> <pub-id pub-id-type="pmid">23133430</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gainotti</surname> <given-names>G.</given-names></name></person-group> (<year>2019</year>). <article-title>Emotions and the right hemisphere: can new data clarify old models?</article-title> <source><italic>Neuroscientist</italic></source> <volume>25</volume> <fpage>258</fpage>&#x2013;<lpage>270</lpage>. <pub-id pub-id-type="doi">10.1177/1073858418785342</pub-id> <pub-id pub-id-type="pmid">29985120</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Geers</surname> <given-names>A. E.</given-names></name> <name><surname>Davidson</surname> <given-names>L. S.</given-names></name> <name><surname>Uchanski</surname> <given-names>R. M.</given-names></name> <name><surname>Nicholas</surname> <given-names>J. G.</given-names></name></person-group> (<year>2013</year>). <article-title>Interdependence of linguistic and indexical speech perception skills in school-age children with early cochlear implantation.</article-title> <source><italic>Ear Hear.</italic></source> <volume>34</volume> <fpage>562</fpage>&#x2013;<lpage>574</lpage>. <pub-id pub-id-type="doi">10.1097/AUD.0b013e31828d2bd6</pub-id> <pub-id pub-id-type="pmid">23652814</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gfeller</surname> <given-names>K.</given-names></name> <name><surname>Turner</surname> <given-names>C.</given-names></name> <name><surname>Mehr</surname> <given-names>M.</given-names></name> <name><surname>Woodworth</surname> <given-names>G.</given-names></name> <name><surname>Fearn</surname> <given-names>R.</given-names></name> <name><surname>Knutson</surname> <given-names>J. F.</given-names></name><etal/></person-group> (<year>2002</year>). <article-title>Recognition of familiar melodies by adult cochlear implant recipients and normal-hearing adults.</article-title> <source><italic>Cochlear Implants Int.</italic></source> <volume>3</volume> <fpage>29</fpage>&#x2013;<lpage>53</lpage>. <pub-id pub-id-type="doi">10.1179/cim.2002.3.1.29</pub-id> <pub-id pub-id-type="pmid">18792110</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gfeller</surname> <given-names>K.</given-names></name> <name><surname>Turner</surname> <given-names>C.</given-names></name> <name><surname>Oleson</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>X.</given-names></name> <name><surname>Gantz</surname> <given-names>B.</given-names></name> <name><surname>Froman</surname> <given-names>R.</given-names></name><etal/></person-group> (<year>2007</year>). <article-title>Accuracy of cochlear implant recipients on pitch perception, melody recognition, and speech reception in noise.</article-title> <source><italic>Ear Hear.</italic></source> <volume>28</volume> <fpage>412</fpage>&#x2013;<lpage>423</lpage>. <pub-id pub-id-type="doi">10.1097/AUD.0b013e3180479318</pub-id> <pub-id pub-id-type="pmid">17485990</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gilbers</surname> <given-names>S.</given-names></name> <name><surname>Fuller</surname> <given-names>C.</given-names></name> <name><surname>Gilbers</surname> <given-names>D.</given-names></name> <name><surname>Broersma</surname> <given-names>M.</given-names></name> <name><surname>Goudbeek</surname> <given-names>M.</given-names></name> <name><surname>Free</surname> <given-names>R.</given-names></name><etal/></person-group> (<year>2015</year>). <article-title>Normal-Hearing listeners&#x2019; and cochlear implant users&#x2019; perception of pitch cues in emotional speech.</article-title> <source><italic>IPerception</italic></source> <volume>6</volume> <issue>0301006615599139</issue>. <pub-id pub-id-type="doi">10.1177/0301006615599139</pub-id> <pub-id pub-id-type="pmid">27648210</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gilley</surname> <given-names>P. M.</given-names></name> <name><surname>Sharma</surname> <given-names>A.</given-names></name> <name><surname>Mitchell</surname> <given-names>T. V.</given-names></name> <name><surname>Dorman</surname> <given-names>M. F.</given-names></name></person-group> (<year>2010</year>). <article-title>The influence of a sensitive period for auditory-visual integration in children with cochlear implants.</article-title> <source><italic>Restor. Neurol. Neurosci.</italic></source> <volume>28</volume> <fpage>207</fpage>&#x2013;<lpage>218</lpage>. <pub-id pub-id-type="doi">10.3233/RNN-2010-0525</pub-id> <pub-id pub-id-type="pmid">20404409</pub-id></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Giraud</surname> <given-names>A. L.</given-names></name> <name><surname>Truy</surname> <given-names>E.</given-names></name> <name><surname>Frackowiak</surname> <given-names>R. S.</given-names></name> <name><surname>Gr&#x00E9;goire</surname> <given-names>M. C.</given-names></name> <name><surname>Pujol</surname> <given-names>J. F.</given-names></name> <name><surname>Collet</surname> <given-names>L.</given-names></name></person-group> (<year>2000</year>). <article-title>Differential recruitment of the speech processing system in healthy subjects and rehabilitated cochlear implant patients.</article-title> <source><italic>Brain</italic></source> <volume>123(Pt 7)</volume> <fpage>1391</fpage>&#x2013;<lpage>1402</lpage>. <pub-id pub-id-type="doi">10.1093/brain/123.7.1391</pub-id> <pub-id pub-id-type="pmid">10869051</pub-id></citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gordon</surname> <given-names>K.</given-names></name> <name><surname>Henkin</surname> <given-names>Y.</given-names></name> <name><surname>Kral</surname> <given-names>A.</given-names></name></person-group> (<year>2015</year>). <article-title>Asymmetric hearing during development: the aural preference syndrome and treatment options.</article-title> <source><italic>Pediatrics</italic></source> <volume>136</volume> <fpage>141</fpage>&#x2013;<lpage>153</lpage>. <pub-id pub-id-type="doi">10.1542/peds.2014-3520</pub-id> <pub-id pub-id-type="pmid">26055845</pub-id></citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gordon</surname> <given-names>K. A.</given-names></name> <name><surname>Papsin</surname> <given-names>B. C.</given-names></name></person-group> (<year>2019</year>). <article-title>Special edition on unilateral deafness and hearing loss: an introduction and overview.</article-title> <source><italic>Hear. Res.</italic></source> <volume>372</volume> <fpage>1</fpage>&#x2013;<lpage>2</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2018.12.007</pub-id> <pub-id pub-id-type="pmid">30642608</pub-id></citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grossmann</surname> <given-names>T.</given-names></name></person-group> (<year>2010</year>). <article-title>The development of emotion perception in face and voice during infancy.</article-title> <source><italic>Restor. Neurol. Neurosci.</italic></source> <volume>28</volume> <fpage>219</fpage>&#x2013;<lpage>236</lpage>. <pub-id pub-id-type="doi">10.3233/RNN-2010-0499</pub-id> <pub-id pub-id-type="pmid">20404410</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hopyan-Misakyan</surname> <given-names>T. M.</given-names></name> <name><surname>Gordon</surname> <given-names>K. A.</given-names></name> <name><surname>Dennis</surname> <given-names>M.</given-names></name> <name><surname>Papsin</surname> <given-names>B. C.</given-names></name></person-group> (<year>2009</year>). <article-title>Recognition of affective speech prosody and facial affect in deaf children with unilateral right cochlear implants.</article-title> <source><italic>Child Neuropsychol.</italic></source> <volume>15</volume> <fpage>136</fpage>&#x2013;<lpage>146</lpage>. <pub-id pub-id-type="doi">10.1080/09297040802403682</pub-id> <pub-id pub-id-type="pmid">18828045</pub-id></citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jiam</surname> <given-names>N. T.</given-names></name> <name><surname>Caldwell</surname> <given-names>M.</given-names></name> <name><surname>Deroche</surname> <given-names>M. L.</given-names></name> <name><surname>Chatterjee</surname> <given-names>M.</given-names></name> <name><surname>Limb</surname> <given-names>C. J.</given-names></name></person-group> (<year>2017</year>). <article-title>Voice emotion perception and production in cochlear implant users.</article-title> <source><italic>Hear. Res.</italic></source> <volume>352</volume> <fpage>30</fpage>&#x2013;<lpage>39</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2017.01.006</pub-id> <pub-id pub-id-type="pmid">28088500</pub-id></citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kang</surname> <given-names>E.</given-names></name> <name><surname>Lee</surname> <given-names>D. S.</given-names></name> <name><surname>Kang</surname> <given-names>H.</given-names></name> <name><surname>Lee</surname> <given-names>J. S.</given-names></name> <name><surname>Oh</surname> <given-names>S. H.</given-names></name> <name><surname>Lee</surname> <given-names>M. C.</given-names></name><etal/></person-group> (<year>2004</year>). <article-title>Neural changes associated with speech learning in deaf children following cochlear implantation.</article-title> <source><italic>NeuroImage</italic></source> <volume>22</volume> <fpage>1173</fpage>&#x2013;<lpage>1181</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2004.02.036</pub-id> <pub-id pub-id-type="pmid">15219589</pub-id></citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kislova</surname> <given-names>O. O.</given-names></name> <name><surname>Rusalova</surname> <given-names>M. N.</given-names></name></person-group> (<year>2009</year>). <article-title>EEG asymmetry in humans: relationship with success in recognizing emotions in the voice.</article-title> <source><italic>Neurosci. Behav. Physiol.</italic></source> <volume>39</volume> <fpage>825</fpage>&#x2013;<lpage>831</lpage>. <pub-id pub-id-type="doi">10.1007/s11055-009-9213-8</pub-id> <pub-id pub-id-type="pmid">19830567</pub-id></citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Klimesch</surname> <given-names>W.</given-names></name></person-group> (<year>1999</year>). <article-title>EEG alpha and theta oscillations reflect cognitive and memory performance: a review and analysis.</article-title> <source><italic>Brain Res. Brain Res. Rev.</italic></source> <volume>29</volume> <fpage>169</fpage>&#x2013;<lpage>195</lpage>.</citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kotz</surname> <given-names>S. A.</given-names></name> <name><surname>Meyer</surname> <given-names>M.</given-names></name> <name><surname>Paulmann</surname> <given-names>S.</given-names></name></person-group> (<year>2006</year>). &#x201C;<article-title>Lateralization of emotional prosody in the brain: an overview and synopsis on the impact of study design</article-title>,&#x201D; in <source><italic>Progress in Brain Research. Understanding Emotions</italic></source>, <volume>Vol. 156</volume> <role>eds</role> <person-group person-group-type="editor"><name><surname>Anders</surname> <given-names>S.</given-names></name> <name><surname>Ende</surname> <given-names>G.</given-names></name> <name><surname>Junghofer</surname> <given-names>M.</given-names></name> <name><surname>Kissler</surname> <given-names>J.</given-names></name> <name><surname>Wildgruber</surname> <given-names>D.</given-names></name></person-group> (<publisher-loc>Amsterdam</publisher-loc>: <publisher-name>Elsevier</publisher-name>), <fpage>285</fpage>&#x2013;<lpage>294</lpage>. <pub-id pub-id-type="doi">10.1016/S0079-6123(06)56015-7</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kral</surname> <given-names>A.</given-names></name> <name><surname>Dorman</surname> <given-names>M. F.</given-names></name> <name><surname>Wilson</surname> <given-names>B. S.</given-names></name></person-group> (<year>2019</year>). <article-title>Neuronal development of hearing and language: cochlear implants and critical periods.</article-title> <source><italic>Annu. Rev. Neurosci.</italic></source> <volume>42</volume> <fpage>47</fpage>&#x2013;<lpage>65</lpage>. <pub-id pub-id-type="doi">10.1146/annurev-neuro-080317-061513</pub-id> <pub-id pub-id-type="pmid">30699049</pub-id></citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kral</surname> <given-names>A.</given-names></name> <name><surname>Hartmann</surname> <given-names>R.</given-names></name> <name><surname>Tillein</surname> <given-names>J.</given-names></name> <name><surname>Heid</surname> <given-names>S.</given-names></name> <name><surname>Klinke</surname> <given-names>R.</given-names></name></person-group> (<year>2001</year>). <article-title>Delayed maturation and sensitive periods in the auditory cortex.</article-title> <source><italic>Audiol. Neurotol.</italic></source> <volume>6</volume> <fpage>346</fpage>&#x2013;<lpage>362</lpage>. <pub-id pub-id-type="doi">10.1159/000046845</pub-id> <pub-id pub-id-type="pmid">11847463</pub-id></citation></ref>
<ref id="B37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kral</surname> <given-names>A.</given-names></name> <name><surname>Hubka</surname> <given-names>P.</given-names></name> <name><surname>Heid</surname> <given-names>S.</given-names></name> <name><surname>Tillein</surname> <given-names>J.</given-names></name></person-group> (<year>2013</year>). <article-title>Single-Sided deafness leads to unilateral aural preference within an early sensitive period.</article-title> <source><italic>Brain</italic></source> <volume>136(Pt 1)</volume> <fpage>180</fpage>&#x2013;<lpage>193</lpage>. <pub-id pub-id-type="doi">10.1093/brain/aws305</pub-id> <pub-id pub-id-type="pmid">23233722</pub-id></citation></ref>
<ref id="B38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kral</surname> <given-names>A.</given-names></name> <name><surname>Sharma</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>Developmental neuroplasticity after cochlear implantation.</article-title> <source><italic>Trends Neurosci.</italic></source> <volume>35</volume> <fpage>111</fpage>&#x2013;<lpage>122</lpage>. <pub-id pub-id-type="doi">10.1016/j.tins.2011.09.004</pub-id> <pub-id pub-id-type="pmid">22104561</pub-id></citation></ref>
<ref id="B39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>M.</given-names></name> <name><surname>Lu</surname> <given-names>B.-L.</given-names></name></person-group> (<year>2009</year>). &#x201C;<article-title>Emotion classification based on gamma-band EEG</article-title>,&#x201D; in <source><italic>Proceedigs of the 2009 Annual International Conference of the IEEE Engineering in Medicine and Biology Society</italic></source>, <publisher-loc>Minneapolis, MN</publisher-loc>, <fpage>1223</fpage>&#x2013;<lpage>1226</lpage>. <pub-id pub-id-type="doi">10.1109/IEMBS.2009.5334139</pub-id> <pub-id pub-id-type="pmid">19964505</pub-id></citation></ref>
<ref id="B40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mancini</surname> <given-names>P.</given-names></name> <name><surname>Giallini</surname> <given-names>I.</given-names></name> <name><surname>Prosperini</surname> <given-names>L.</given-names></name> <name><surname>D&#x2019;alessandro</surname> <given-names>H. D.</given-names></name> <name><surname>Guerzoni</surname> <given-names>L.</given-names></name> <name><surname>Murri</surname> <given-names>A.</given-names></name><etal/></person-group> (<year>2016</year>). <article-title>Level of emotion comprehension in children with mid to long term cochlear implant use: how basic and more complex emotion recognition relates to language and age at implantation.</article-title> <source><italic>Int. J. Pediatr. Otorhinolaryngol.</italic></source> <volume>87</volume> <fpage>219</fpage>&#x2013;<lpage>232</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijporl.2016.06.033</pub-id> <pub-id pub-id-type="pmid">27368475</pub-id></citation></ref>
<ref id="B41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Marsella</surname> <given-names>P.</given-names></name> <name><surname>Scorpecci</surname> <given-names>A.</given-names></name> <name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Giannantonio</surname> <given-names>S.</given-names></name> <name><surname>Maglione</surname> <given-names>A. G.</given-names></name> <name><surname>Venuti</surname> <given-names>I.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>EEG activity as an objective measure of cognitive load during effortful listening: a study on pediatric subjects with bilateral, asymmetric sensorineural hearing loss.</article-title> <source><italic>Int. J. Pediatr. Otorhinolaryngol.</italic></source> <volume>99</volume> <fpage>1</fpage>&#x2013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijporl.2017.05.006</pub-id> <pub-id pub-id-type="pmid">28688548</pub-id></citation></ref>
<ref id="B42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mildner</surname> <given-names>V.</given-names></name> <name><surname>Koska</surname> <given-names>T.</given-names></name></person-group> (<year>2014</year>). <article-title>Recognition and production of emotions in children with cochlear implants.</article-title> <source><italic>Clin. Linguist. Phon.</italic></source> <volume>28</volume> <fpage>543</fpage>&#x2013;<lpage>554</lpage>. <pub-id pub-id-type="doi">10.3109/02699206.2014.927000</pub-id> <pub-id pub-id-type="pmid">25000377</pub-id></citation></ref>
<ref id="B43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Most</surname> <given-names>T.</given-names></name> <name><surname>Aviner</surname> <given-names>C.</given-names></name></person-group> (<year>2009</year>). <article-title>Auditory, visual, and auditory-visual perception of emotions by individuals with cochlear implants, hearing AIDS, and normal hearing.</article-title> <source><italic>J. Deaf Stud. Deaf Educ.</italic></source> <volume>14</volume> <fpage>449</fpage>&#x2013;<lpage>464</lpage>. <pub-id pub-id-type="doi">10.1093/deafed/enp007</pub-id> <pub-id pub-id-type="pmid">19398533</pub-id></citation></ref>
<ref id="B44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Most</surname> <given-names>T.</given-names></name> <name><surname>Michaelis</surname> <given-names>H.</given-names></name></person-group> (<year>2012</year>). <article-title>Auditory, visual, and auditory-visual perceptions of emotions by young children with hearing loss versus children with normal hearing.</article-title> <source><italic>J. Speech Lang. Hear. Res. JSLHR</italic></source> <volume>55</volume> <fpage>1148</fpage>&#x2013;<lpage>1162</lpage>. <pub-id pub-id-type="doi">10.1044/1092-4388(2011/11-0060)</pub-id></citation></ref>
<ref id="B45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nakata</surname> <given-names>T.</given-names></name> <name><surname>Trehub</surname> <given-names>S. E.</given-names></name> <name><surname>Kanda</surname> <given-names>Y.</given-names></name></person-group> (<year>2012</year>). <article-title>Effect of cochlear implants on children&#x2019;s perception and production of speech prosody.</article-title> <source><italic>J. Acoust. Soc. Am.</italic></source> <volume>131</volume> <fpage>1307</fpage>&#x2013;<lpage>1314</lpage>. <pub-id pub-id-type="doi">10.1121/1.3672697</pub-id></citation></ref>
<ref id="B46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nowicki</surname> <given-names>S.</given-names></name> <name><surname>Duke</surname> <given-names>M. P.</given-names></name></person-group> (<year>1994</year>). <article-title>Individual differences in the nonverbal communication of affect: the diagnostic analysis of nonverbal accuracy scale.</article-title> <source><italic>J. Nonverbal Behav.</italic></source> <volume>18</volume> <fpage>9</fpage>&#x2013;<lpage>35</lpage>. <pub-id pub-id-type="doi">10.1007/BF02169077</pub-id></citation></ref>
<ref id="B47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Orsini</surname> <given-names>A.</given-names></name> <name><surname>Grossi</surname> <given-names>D.</given-names></name> <name><surname>Capitani</surname> <given-names>E.</given-names></name> <name><surname>Laiacona</surname> <given-names>M.</given-names></name> <name><surname>Papagno</surname> <given-names>C.</given-names></name> <name><surname>Vallar</surname> <given-names>G.</given-names></name></person-group> (<year>1987</year>). <article-title>Verbal and spatial immediate memory span: normative data from 1355 adults and 1112 children.</article-title> <source><italic>Ital. J. Neurol. Sci.</italic></source> <volume>8</volume> <fpage>539</fpage>&#x2013;<lpage>548</lpage>. <pub-id pub-id-type="doi">10.1007/BF02333660</pub-id> <pub-id pub-id-type="pmid">3429213</pub-id></citation></ref>
<ref id="B48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Paquette</surname> <given-names>S.</given-names></name> <name><surname>Ahmed</surname> <given-names>G. D.</given-names></name> <name><surname>Goffi-Gomez</surname> <given-names>M. V.</given-names></name> <name><surname>Hoshino</surname> <given-names>A. C. H.</given-names></name> <name><surname>Peretz</surname> <given-names>I.</given-names></name> <name><surname>Lehmann</surname> <given-names>A.</given-names></name></person-group> (<year>2018</year>). <article-title>Musical and vocal emotion perception for cochlear implants users.</article-title> <source><italic>Hear. Res.</italic></source> <volume>370</volume> <fpage>272</fpage>&#x2013;<lpage>282</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2018.08.009</pub-id> <pub-id pub-id-type="pmid">30181063</pub-id></citation></ref>
<ref id="B49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Piccioni</surname> <given-names>L. O.</given-names></name> <name><surname>Cartocci</surname> <given-names>G.</given-names></name> <name><surname>Maglione</surname> <given-names>A. G.</given-names></name> <name><surname>Modica</surname> <given-names>E.</given-names></name> <name><surname>Rossi</surname> <given-names>D.</given-names></name> <name><surname>Mancini</surname> <given-names>M.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>EEG variations as estimators of listening effort during recognition of words in noise in unilateral and bilateral sequential adult cochlear implant users.</article-title> <source><italic>J. Hear. Sci.</italic></source> <volume>8</volume> <fpage>116</fpage>&#x2013;<lpage>116</lpage>.</citation></ref>
<ref id="B50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rouger</surname> <given-names>J.</given-names></name> <name><surname>Lagleyre</surname> <given-names>S.</given-names></name> <name><surname>Fraysse</surname> <given-names>B.</given-names></name> <name><surname>Deneve</surname> <given-names>S.</given-names></name> <name><surname>Deguine</surname> <given-names>O.</given-names></name> <name><surname>Barone</surname> <given-names>P.</given-names></name></person-group> (<year>2007</year>). <article-title>Evidence that cochlear-implanted deaf patients are better multisensory integrators.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>104</volume> <fpage>7295</fpage>&#x2013;<lpage>7300</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0609419104</pub-id> <pub-id pub-id-type="pmid">17404220</pub-id></citation></ref>
<ref id="B51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sauter</surname> <given-names>D. A.</given-names></name> <name><surname>Crasborn</surname> <given-names>O.</given-names></name> <name><surname>Engels</surname> <given-names>T.</given-names></name> <name><surname>Kamilo&#x01E7;lu</surname> <given-names>R. G.</given-names></name> <name><surname>Sun</surname> <given-names>R.</given-names></name> <name><surname>Eisner</surname> <given-names>F.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>Human emotional vocalizations can develop in the absence of auditory learning.</article-title> <source><italic>Emotion</italic></source> <volume>20</volume> <fpage>1435</fpage>&#x2013;<lpage>1445</lpage>.</citation></ref>
<ref id="B52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sauter</surname> <given-names>D. A.</given-names></name> <name><surname>Eisner</surname> <given-names>F.</given-names></name> <name><surname>Calder</surname> <given-names>A. J.</given-names></name> <name><surname>Scott</surname> <given-names>S. K.</given-names></name></person-group> (<year>2006</year>). <article-title>Perceptual cues in non-verbal vocal expressions of emotion.</article-title> <source><italic>Q. J. Exp. Psychol. (2006)</italic></source> <volume>63</volume> <fpage>2251</fpage>&#x2013;<lpage>2272</lpage>. <pub-id pub-id-type="doi">10.1080/17470211003721642</pub-id> <pub-id pub-id-type="pmid">20437296</pub-id></citation></ref>
<ref id="B53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sauter</surname> <given-names>D. A.</given-names></name> <name><surname>Eisner</surname> <given-names>F.</given-names></name> <name><surname>Ekman</surname> <given-names>P.</given-names></name> <name><surname>Scott</surname> <given-names>S. K.</given-names></name></person-group> (<year>2010</year>). <article-title>Cross-Cultural recognition of basic emotions through nonverbal emotional vocalizations.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>107</volume> <fpage>2408</fpage>&#x2013;<lpage>2412</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0908239106</pub-id> <pub-id pub-id-type="pmid">20133790</pub-id></citation></ref>
<ref id="B54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sauter</surname> <given-names>D. A.</given-names></name> <name><surname>Panattoni</surname> <given-names>C.</given-names></name> <name><surname>Happ&#x00E9;</surname> <given-names>F.</given-names></name></person-group> (<year>2013</year>). <article-title>Children&#x2019;s recognition of emotions from vocal cues.</article-title> <source><italic>Br. J. Dev. Psychol.</italic></source> <volume>31</volume> <fpage>97</fpage>&#x2013;<lpage>113</lpage>. <pub-id pub-id-type="doi">10.1111/j.2044-835X.2012.02081.x</pub-id> <pub-id pub-id-type="pmid">23331109</pub-id></citation></ref>
<ref id="B55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scherer</surname> <given-names>K. R.</given-names></name></person-group> (<year>2003</year>). <article-title>Vocal communication of emotion: a review of research paradigms.</article-title> <source><italic>Speech Commun.</italic></source> <volume>40</volume> <fpage>227</fpage>&#x2013;<lpage>256</lpage>. <pub-id pub-id-type="doi">10.1016/S0167-6393(02)00084-5</pub-id></citation></ref>
<ref id="B56"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schorr</surname> <given-names>E.</given-names></name></person-group> (<year>2005</year>). <source><italic>Social and Emotional Functioning of Children with Cochlear Implants.</italic></source> Available online at: <ext-link ext-link-type="uri" xlink:href="http://drum.lib.umd.edu/handle/1903/2408">http://drum.lib.umd.edu/handle/1903/2408</ext-link> <comment>(accessed December 23, 2020)</comment>.</citation></ref>
<ref id="B57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schorr</surname> <given-names>E. A.</given-names></name> <name><surname>Fox</surname> <given-names>N. A.</given-names></name> <name><surname>van Wassenhove</surname> <given-names>V.</given-names></name> <name><surname>Knudsen</surname> <given-names>E. I.</given-names></name></person-group> (<year>2005</year>). <article-title>Auditory-Visual fusion in speech perception in children with cochlear implants.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>102</volume> <fpage>18748</fpage>&#x2013;<lpage>18750</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0508862102</pub-id> <pub-id pub-id-type="pmid">16339316</pub-id></citation></ref>
<ref id="B58"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sharma</surname> <given-names>A.</given-names></name> <name><surname>Dorman</surname> <given-names>M. F.</given-names></name></person-group> (<year>2006</year>). <article-title>Central auditory development in children with cochlear implants: clinical implications.</article-title> <source><italic>Adv. Otorhinolaryngol.</italic></source> <volume>64</volume> <fpage>66</fpage>&#x2013;<lpage>88</lpage>. <pub-id pub-id-type="doi">10.1159/000094646</pub-id> <pub-id pub-id-type="pmid">16891837</pub-id></citation></ref>
<ref id="B59"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sharma</surname> <given-names>A.</given-names></name> <name><surname>Dorman</surname> <given-names>M. F.</given-names></name> <name><surname>Kral</surname> <given-names>A.</given-names></name></person-group> (<year>2005</year>). <article-title>The influence of a sensitive period on central auditory development in children with unilateral and bilateral cochlear implants.</article-title> <source><italic>Hear. Res.</italic></source> <volume>203</volume> <fpage>134</fpage>&#x2013;<lpage>143</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2004.12.010</pub-id> <pub-id pub-id-type="pmid">15855038</pub-id></citation></ref>
<ref id="B60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sharma</surname> <given-names>A.</given-names></name> <name><surname>Dorman</surname> <given-names>M. F.</given-names></name> <name><surname>Spahr</surname> <given-names>A. J.</given-names></name></person-group> (<year>2002</year>). <article-title>A sensitive period for the development of the central auditory system in children with cochlear implants: implications for age of implantation.</article-title> <source><italic>Ear Hear.</italic></source> <volume>23</volume> <fpage>532</fpage>&#x2013;<lpage>539</lpage>.</citation></ref>
<ref id="B61"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Strelnikov</surname> <given-names>K.</given-names></name> <name><surname>Rouger</surname> <given-names>J.</given-names></name> <name><surname>Demonet</surname> <given-names>J.-F.</given-names></name> <name><surname>Lagleyre</surname> <given-names>S.</given-names></name> <name><surname>Fraysse</surname> <given-names>B.</given-names></name> <name><surname>Deguine</surname> <given-names>O.</given-names></name><etal/></person-group> (<year>2013</year>). <article-title>Visual activity predicts auditory recovery from deafness after adult cochlear implantation.</article-title> <source><italic>Brain</italic></source> <volume>136</volume> <fpage>3682</fpage>&#x2013;<lpage>3695</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awt274</pub-id> <pub-id pub-id-type="pmid">24136826</pub-id></citation></ref>
<ref id="B62"><citation citation-type="journal"><collab>The Ear Foundation</collab> (<year>2017</year>). <source><italic>Cochlear Implant Information Sheet</italic></source>. <comment>(accessed October 14, 2019)</comment></citation></ref>
<ref id="B63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tompkins</surname> <given-names>C. A.</given-names></name> <name><surname>Flowers</surname> <given-names>C. R.</given-names></name></person-group> (<year>1985</year>). <article-title>Perception of emotional intonation by brain-damaged adults: the influence of task processing levels.</article-title> <source><italic>J. Speech Hear. Res.</italic></source> <volume>28</volume> <fpage>527</fpage>&#x2013;<lpage>538</lpage>. <pub-id pub-id-type="doi">10.1044/jshr.2804.527</pub-id> <pub-id pub-id-type="pmid">4087888</pub-id></citation></ref>
<ref id="B64"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vanvooren</surname> <given-names>S.</given-names></name> <name><surname>Hofmann</surname> <given-names>M.</given-names></name> <name><surname>Poelmans</surname> <given-names>H.</given-names></name> <name><surname>Ghesqui&#x00E8;re</surname> <given-names>P.</given-names></name> <name><surname>Wouters</surname> <given-names>J.</given-names></name></person-group> (<year>2015</year>). <article-title>Theta, beta and gamma rate modulations in the developing auditory system.</article-title> <source><italic>Hear. Res.</italic></source> <volume>327</volume> <fpage>153</fpage>&#x2013;<lpage>162</lpage>. <pub-id pub-id-type="doi">10.1016/j.heares.2015.06.011</pub-id> <pub-id pub-id-type="pmid">26117409</pub-id></citation></ref>
<ref id="B65"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Volkova</surname> <given-names>A.</given-names></name> <name><surname>Trehub</surname> <given-names>S. E.</given-names></name> <name><surname>Schellenberg</surname> <given-names>E. G.</given-names></name> <name><surname>Papsin</surname> <given-names>B. C.</given-names></name> <name><surname>Gordon</surname> <given-names>K. A.</given-names></name></person-group> (<year>2013</year>). <article-title>Children with bilateral cochlear implants identify emotion in speech and music.</article-title> <source><italic>Cochlear Implants Int.</italic></source> <volume>14</volume> <fpage>80</fpage>&#x2013;<lpage>91</lpage>. <pub-id pub-id-type="doi">10.1179/1754762812Y.0000000004</pub-id> <pub-id pub-id-type="pmid">23453221</pub-id></citation></ref>
<ref id="B66"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>D. J.</given-names></name> <name><surname>Trehub</surname> <given-names>S. E.</given-names></name> <name><surname>Volkova</surname> <given-names>A.</given-names></name> <name><surname>van Lieshout</surname> <given-names>P.</given-names></name></person-group> (<year>2013</year>). <article-title>Child implant users&#x2019; imitation of happy- and sad-sounding speech.</article-title> <source><italic>Front. Psychol.</italic></source> <volume>4</volume>:<issue>351</issue>. <pub-id pub-id-type="doi">10.3389/fpsyg.2013.00351</pub-id> <pub-id pub-id-type="pmid">23801976</pub-id></citation></ref>
<ref id="B67"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Su</surname> <given-names>Y.</given-names></name> <name><surname>Fang</surname> <given-names>P.</given-names></name> <name><surname>Zhou</surname> <given-names>Q.</given-names></name></person-group> (<year>2011</year>). <article-title>Facial expression recognition: can preschoolers with cochlear implants and hearing aids catch it?</article-title> <source><italic>Res. Dev. Disabil.</italic></source> <volume>32</volume> <fpage>2583</fpage>&#x2013;<lpage>2588</lpage>. <pub-id pub-id-type="doi">10.1016/j.ridd.2011.06.019</pub-id> <pub-id pub-id-type="pmid">21807479</pub-id></citation></ref>
<ref id="B68"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wiefferink</surname> <given-names>C. H.</given-names></name> <name><surname>Rieffe</surname> <given-names>C.</given-names></name> <name><surname>Ketelaar</surname> <given-names>L.</given-names></name> <name><surname>Frijns</surname> <given-names>J. H. M.</given-names></name></person-group> (<year>2012</year>). <article-title>Predicting social functioning in children with a cochlear implant and in normal-hearing children: the role of emotion regulation.</article-title> <source><italic>Int. J. Pediatr. Otorhinolaryngol.</italic></source> <volume>76</volume> <fpage>883</fpage>&#x2013;<lpage>889</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijporl.2012.02.065</pub-id> <pub-id pub-id-type="pmid">22459035</pub-id></citation></ref>
<ref id="B69"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wiefferink</surname> <given-names>C. H.</given-names></name> <name><surname>Rieffe</surname> <given-names>C.</given-names></name> <name><surname>Ketelaar</surname> <given-names>L.</given-names></name> <name><surname>Raeve</surname> <given-names>L. De</given-names></name> <name><surname>Frijns</surname> <given-names>J. H. M.</given-names></name></person-group> (<year>2013</year>). <article-title>Emotion understanding in deaf children with a cochlear implant.</article-title> <source><italic>J. Deaf Stud. Deaf Educ.</italic></source> <volume>18</volume> <fpage>175</fpage>&#x2013;<lpage>186</lpage>. <pub-id pub-id-type="doi">10.1093/deafed/ens042</pub-id> <pub-id pub-id-type="pmid">23232770</pub-id></citation></ref>
<ref id="B70"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yang</surname> <given-names>K.</given-names></name> <name><surname>Tong</surname> <given-names>L.</given-names></name> <name><surname>Shu</surname> <given-names>J.</given-names></name> <name><surname>Zhuang</surname> <given-names>N.</given-names></name> <name><surname>Yan</surname> <given-names>B.</given-names></name> <name><surname>Zeng</surname> <given-names>Y.</given-names></name></person-group> (<year>2020</year>). <article-title>High gamma band EEG closely related to emotion: evidence from functional network.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>14</volume>:<issue>89</issue>. <pub-id pub-id-type="doi">10.3389/fnhum.2020.00089</pub-id> <pub-id pub-id-type="pmid">32265674</pub-id></citation></ref>
<ref id="B71"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Young</surname> <given-names>A. W.</given-names></name> <name><surname>Fr&#x00FC;hholz</surname> <given-names>S.</given-names></name> <name><surname>Schweinberger</surname> <given-names>S. R.</given-names></name></person-group> (<year>2020</year>). <article-title>Face and voice perception: understanding commonalities and differences.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>24</volume> <fpage>398</fpage>&#x2013;<lpage>410</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2020.02.001</pub-id> <pub-id pub-id-type="pmid">32298625</pub-id></citation></ref>
<ref id="B72"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zupan</surname> <given-names>B.</given-names></name> <name><surname>Sussman</surname> <given-names>J. E.</given-names></name></person-group> (<year>2009</year>). <article-title>Auditory preferences of young children with and without hearing loss for meaningful auditory&#x2013;visual compound stimuli.</article-title> <source><italic>J. Commun. Disord.</italic></source> <volume>42</volume> <fpage>381</fpage>&#x2013;<lpage>396</lpage>. <pub-id pub-id-type="doi">10.1016/j.jcomdis.2009.04.002</pub-id> <pub-id pub-id-type="pmid">19464698</pub-id></citation></ref>
</ref-list>
</back>
</article>