<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="case-report">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2022.870103</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Human Neuroscience</subject>
<subj-group>
<subject>Case Report</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Embodiment of a virtual prosthesis through training using an EMG-based human-machine interface: Case series</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Rodrigues</surname> <given-names>Karina Aparecida</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1666512/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Moreira</surname> <given-names>Jo&#x00E3;o Vitor da Silva</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Pinheiro</surname> <given-names>Daniel Jos&#x00E9; Lins Leal</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/958480/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Dantas</surname> <given-names>Rodrigo Lantyer Marques</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1788154/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Santos</surname> <given-names>Tha&#x00ED;s Cardoso</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Nepomuceno</surname> <given-names>Jo&#x00E3;o Luiz Vieira</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1788166/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Nogueira</surname> <given-names>Maria Ang&#x00E9;lica Ratier Jajah</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Cavalheiro</surname> <given-names>Esper Abr&#x00E3;o</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/170441/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Faber</surname> <given-names>Jean</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1198914/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Neuroengineering and Neurocognition Laboratory, Paulista School of Medicine, Department of Neurology and Neurosurgery, Federal University of S&#x00E3;o Paulo</institution>, <addr-line>S&#x00E3;o Paulo</addr-line>, <country>Brazil</country></aff>
<aff id="aff2"><sup>2</sup><institution>Neuroengineering Laboratory, Department of Biomedical Engineering, Institute of Science and Technology, Federal University of S&#x00E3;o Paulo</institution>, <addr-line>S&#x00E3;o Jos&#x00E9; dos Campos</addr-line>, <country>Brazil</country></aff>
<aff id="aff3"><sup>3</sup><institution>Center of Rehabilitation Lucy Montoro</institution>, <addr-line>S&#x00E3;o Jos&#x00E9; dos Campos</addr-line>, <country>Brazil</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Ren Xu, g.tec Medical Engineering GmbH, Austria</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Dalila Burin, Tohoku University, Japan; Usha Kuruganti, University of New Brunswick, Fredericton, Canada; Lauren Buck, Trinity College Dublin, Ireland; Cosima Prahm, University of T&#x00FC;bingen, Germany</p></fn>
<corresp id="c001">&#x002A;Correspondence: Karina Aparecida Rodrigues, <email>rodrigues.karina@unifesp.br</email></corresp>
<fn fn-type="other" id="fn004"><p>This article was submitted to Brain-Computer Interfaces, a section of the journal Frontiers in Human Neuroscience</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>04</day>
<month>08</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>16</volume>
<elocation-id>870103</elocation-id>
<history>
<date date-type="received">
<day>05</day>
<month>02</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>06</day>
<month>07</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2022 Rodrigues, Moreira, Pinheiro, Dantas, Santos, Nepomuceno, Nogueira, Cavalheiro and Faber.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Rodrigues, Moreira, Pinheiro, Dantas, Santos, Nepomuceno, Nogueira, Cavalheiro and Faber</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>Therapeutic strategies capable of inducing and enhancing prosthesis embodiment are a key point for better adaptation to and acceptance of prosthetic limbs. In this study, we developed a training protocol using an EMG-based human-machine interface (HMI) that was applied in the preprosthetic rehabilitation phase of people with amputation. This is a case series with the objective of evaluating the induction and enhancement of the embodiment of a virtual prosthesis. Six men and a woman with unilateral transfemoral traumatic amputation without previous use of prostheses participated in the study. Participants performed a training protocol with the EMG-based HMI, composed of six sessions held twice a week, each lasting 30 mins. This system consisted of myoelectric control of the movements of a virtual prosthesis immersed in a 3D virtual environment. Additionally, vibrotactile stimuli were provided on the participant&#x2019;s back corresponding to the movements performed. Embodiment was investigated from the following set of measurements: skin conductance response (affective measurement), crossmodal congruency effect (spatial perception measurement), ability to control the virtual prosthesis (motor measurement), and reports before and after the training. The increase in the skin conductance response in conditions where the virtual prosthesis was threatened, recalibration of the peripersonal space perception identified by the crossmodal congruency effect, ability to control the virtual prosthesis, and participant reports consistently showed the induction and enhancement of virtual prosthesis embodiment. Therefore, this protocol using EMG-based HMI was shown to be a viable option to achieve and enhance the embodiment of a virtual prosthetic limb.</p>
</abstract>
<kwd-group>
<kwd>virtual reality</kwd>
<kwd>amputee</kwd>
<kwd>prosthesis</kwd>
<kwd>embodiment</kwd>
<kwd>ownership</kwd>
<kwd>agency</kwd>
</kwd-group>
<contract-num rid="cn001">442563-2016/7</contract-num>
<contract-sponsor id="cn001">Conselho Nacional de Desenvolvimento Cient&#x00ED;fico e Tecnol&#x00F3;gico<named-content content-type="fundref-id">10.13039/501100003593</named-content></contract-sponsor>
<contract-sponsor id="cn002">Coordena&#x00E7;&#x00E3;o de Aperfei&#x00E7;oamento de Pessoal de N&#x00ED;vel Superior<named-content content-type="fundref-id">10.13039/501100002322</named-content></contract-sponsor>
<counts>
<fig-count count="5"/>
<table-count count="1"/>
<equation-count count="0"/>
<ref-count count="86"/>
<page-count count="16"/>
<word-count count="11078"/>
</counts>
</article-meta>
</front>
<body>
<sec id="S1" sec-type="intro">
<title>Introduction</title>
<p>The concept of embodiment of an external device can be defined as &#x201C;the ability to process properties of this object at the sensory, motor and/or affective levels in the same way that the properties of one&#x2019;s own body parts&#x201D; (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>), which can generate a sense of ownership and/or agency (<xref ref-type="bibr" rid="B11">Botvinick and Cohen, 1998</xref>). The sense of ownership refers to our ability to perceive our own body and to differentiate it from other bodies or objects using sensory information (<xref ref-type="bibr" rid="B77">Tsakiris et al., 2007a</xref>). The sense of agency, in contrast, is related to the perception of control of one&#x2019;s own body movements and distinguishing our actions from those of other people or objects (<xref ref-type="bibr" rid="B78">Tsakiris et al., 2007b</xref>). In this way, embodiment can induce perception of the extension of body limits, including assistive technology devices, such as wheelchairs in people with spinal cord injury (<xref ref-type="bibr" rid="B4">Arnhoff and Mehl, 1963</xref>), canes in blind people (<xref ref-type="bibr" rid="B66">Serino et al., 2007</xref>) or even prostheses in people with amputations (<xref ref-type="bibr" rid="B50">Mcdonnell et al., 1989</xref>; <xref ref-type="bibr" rid="B17">Canzoneri et al., 2013</xref>; <xref ref-type="bibr" rid="B57">Petrini et al., 2019</xref>).</p>
<p>Recent research has revealed that the prosthesis embodiment is a key point during the rehabilitation and adaptation after amputation (<xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>; <xref ref-type="bibr" rid="B79">Van Den Heiligenberg et al., 2018</xref>; <xref ref-type="bibr" rid="B57">Petrini et al., 2019</xref>), bringing a series of benefits: more intuitive control, facilitation of learning (<xref ref-type="bibr" rid="B33">Imaizumi et al., 2016</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>), restoration of the perception of bodily integrity (<xref ref-type="bibr" rid="B28">Graczyk et al., 2018</xref>; <xref ref-type="bibr" rid="B52">Middleton and Ortiz-Catalan, 2020</xref>), and assisting in the treatment of phantom pain and residual limb pain (<xref ref-type="bibr" rid="B7">Bekrater-Bodmann et al., 2021</xref>). These aspects together make possible a better physical, psychological, and cognitive adaptation, optimizing the rehabilitation process and acceptance of the prosthetic limb.</p>
<p>Several studies have corroborated this concept of embodiment, showing that people with amputation can better perceive the prosthesis when it is voluntarily controlled and/or provides somatosensory feedback (<xref ref-type="bibr" rid="B44">Marasco et al., 2011</xref>; <xref ref-type="bibr" rid="B60">Raspopovic et al., 2014</xref>; <xref ref-type="bibr" rid="B31">Hellman et al., 2015</xref>; <xref ref-type="bibr" rid="B81">Wijk and Carlsson, 2015</xref>; <xref ref-type="bibr" rid="B63">Schiefer et al., 2017</xref>; <xref ref-type="bibr" rid="B22">Dietrich et al., 2018</xref>; <xref ref-type="bibr" rid="B57">Petrini et al., 2019</xref>). Taking this into account, an EMG-based human-machine interface (HMI) is a type of system based on voluntary control and corresponding sensory feedback. This closed loop allows gradual and consistent learning of the individual&#x2019;s control ability (<xref ref-type="bibr" rid="B41">Lebedev and Nicolelis, 2017</xref>). Furthermore, it contains important aspects underlying the device embodiment: volition and sensory stimulation (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>). Thus, EMG-based HMI training provides a real-time paradigm to study the embodiment process and for use as a complementary therapeutic option.</p>
<p>The manner in which feedback is presented is a crucial factor for learning (<xref ref-type="bibr" rid="B70">Sitaram et al., 2017</xref>). An interesting option that has recently emerged is the use of virtual reality (VR). Protocols involving VR are applicable in different clinical contexts (<xref ref-type="bibr" rid="B9">Bohil et al., 2011</xref>; <xref ref-type="bibr" rid="B29">Gumma and Youssef, 2019</xref>; <xref ref-type="bibr" rid="B59">Qian et al., 2020</xref>), including as part of training before the use of the physical prosthesis, for people with amputations (<xref ref-type="bibr" rid="B39">Kluger et al., 2019</xref>). Furthermore, there is an extensive literature corroborating the embodiment of bodies, limbs, or virtual objects (<xref ref-type="bibr" rid="B18">Cole et al., 2009</xref>; <xref ref-type="bibr" rid="B72">Slater et al., 2009</xref>; <xref ref-type="bibr" rid="B65">Seng&#x00FC;l et al., 2012</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>; <xref ref-type="bibr" rid="B13">Buck et al., 2020</xref>). Considering that the learning acquired in a VR environment is transferable to the physical environment (<xref ref-type="bibr" rid="B14">Bunderson, 2014</xref>; <xref ref-type="bibr" rid="B29">Gumma and Youssef, 2019</xref>; <xref ref-type="bibr" rid="B39">Kluger et al., 2019</xref>; <xref ref-type="bibr" rid="B59">Qian et al., 2020</xref>), the induction of virtual prosthesis embodiment could help the process of training and adaptation to the use of a physical prosthesis.</p>
<p>Other sensory modalities, in addition to vision, can be used to provide physiological feedback, such as touch and hearing, either isolated or integrated (<xref ref-type="bibr" rid="B23">Donovan et al., 2016</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>). Vibrotactile stimulation on the residual limb of people with amputations represents a natural choice, with an optimal sensory transduction since it uses remnant peripheral sensory pathways (<xref ref-type="bibr" rid="B24">Ehrsson et al., 2008</xref>; <xref ref-type="bibr" rid="B2">Antfolk et al., 2013</xref>; <xref ref-type="bibr" rid="B20">D&#x2019;Alonzo et al., 2015</xref>). However, due to practical or technical issues, it is not always possible to use these remnant areas of residual limbs, either because the residual region is too short, the surgical procedure damages a nerve fiber, or the area is not easily accessed. In these cases, it would be important to have alternative body regions that, once stimulated, would provide similar results of perception (<xref ref-type="bibr" rid="B36">Jones et al., 2009</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>).</p>
<p>In this study, we have developed an EMG-based HMI and training protocol, which aggregated previous findings in the literature that had yet to be applied and integrated in the clinical context during the rehabilitation of people with amputation: myoelectric control (<xref ref-type="bibr" rid="B48">Maruishi et al., 2004</xref>; <xref ref-type="bibr" rid="B64">Sebelius et al., 2005</xref>), VR environment (<xref ref-type="bibr" rid="B39">Kluger et al., 2019</xref>; <xref ref-type="bibr" rid="B59">Qian et al., 2020</xref>) and vibrotactile stimulation (<xref ref-type="bibr" rid="B24">Ehrsson et al., 2008</xref>; <xref ref-type="bibr" rid="B2">Antfolk et al., 2013</xref>; <xref ref-type="bibr" rid="B20">D&#x2019;Alonzo et al., 2015</xref>).</p>
<p>The EMG-based HMI was designed in a way that the participants could control the movements of a prosthesis immersed in a VR environment using the myoelectric activity of the residual limb, while receiving non-invasive vibrotactile stimuli applied on their back, which were mapped to represent the movements of the virtual prosthesis. The training was applied during the preprosthetic rehabilitation phase of people with transfemoral amputation. The hypothesis was that training with this EMG-based HMI could induce and enhance virtual prosthesis embodiment.</p>
</sec>
<sec id="S2">
<title>Case description</title>
<p>This is a case series study that assessed the induction and enhancement of virtual prosthesis embodiment through a training protocol with an EMG-based HMI. Case Reports Guidelines were used to develop this work. The research protocol was approved by the Ethics and Research Committee of the Universidade Federal de S&#x00E3;o Paulo (n&#x00B0; 3.030.942) and of the Hospital Municipal Jos&#x00E9; de Carvalho Florence (n&#x00B0; 3.273.170), Brazil.</p>
<sec id="S2.SS1">
<title>Characterization of participants</title>
<p>For the inclusion of participants in the research, the following criteria were adopted: people with unilateral transfemoral amputation, both sexes, age between 18 and 46 years, and without previous use of prostheses. People who had open skin lesions on the residual limb or back, uncorrected visual impairment or associated neurological diseases were excluded from participation in the study. The participants provided written consent prior to the start of the study, and all ethical recommendations were followed.</p>
<p>Sociodemographic, physical, functional, cognitive, and psychological assessments of all participants were carried out to provide a general characterization (<xref ref-type="table" rid="T1">Table 1</xref>). Qualitative reports are provided in <xref ref-type="supplementary-material" rid="DS1">Supplementary Table 1</xref> in the <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref>.</p>
<table-wrap position="float" id="T1">
<label>TABLE 1</label>
<caption><p>Characterization of the sociodemographic, physical, functional, cognitive, and psychological aspects of the participants.</p></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<td valign="top" align="left">Measures</td>
<td valign="top" align="center" colspan="7">Participants<hr/></td>
</tr>
<tr>
<td/>
<td valign="top" align="center">A</td>
<td valign="top" align="center">B</td>
<td valign="top" align="center">C</td>
<td valign="top" align="center">D</td>
<td valign="top" align="center">E</td>
<td valign="top" align="center">F</td>
<td valign="top" align="center">G</td>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left" colspan="8">&#x2006;<bold>Sociodemographic</bold></td>
</tr>
<tr>
<td valign="top" align="left">Age (years)</td>
<td valign="top" align="center">46</td>
<td valign="top" align="center">32</td>
<td valign="top" align="center">22</td>
<td valign="top" align="center">32</td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">46</td>
<td valign="top" align="center">41</td>
</tr>
<tr>
<td valign="top" align="left">Sex</td>
<td valign="top" align="center">Male</td>
<td valign="top" align="center">Male</td>
<td valign="top" align="center">Male</td>
<td valign="top" align="center">Male</td>
<td valign="top" align="center">Male</td>
<td valign="top" align="center">Female</td>
<td valign="top" align="center">Male</td>
</tr>
<tr>
<td valign="top" align="left">Education (years)</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left" colspan="8">&#x2006;<bold>Physical and functional</bold></td>
</tr>
<tr>
<td valign="top" align="left">Height (m)</td>
<td valign="top" align="center">1.69</td>
<td valign="top" align="center">1.87</td>
<td valign="top" align="center">1.81</td>
<td valign="top" align="center">1.79</td>
<td valign="top" align="center">1.69</td>
<td valign="top" align="center">1.67</td>
<td valign="top" align="center">1.75</td>
</tr>
<tr>
<td valign="top" align="left">Body mass (kg)</td>
<td valign="top" align="center">80.9</td>
<td valign="top" align="center">64.5</td>
<td valign="top" align="center">60.0</td>
<td valign="top" align="center">88.3</td>
<td valign="top" align="center">46.0</td>
<td valign="top" align="center">78.7</td>
<td valign="top" align="center">67.0</td>
</tr>
<tr>
<td valign="top" align="left">Amputation time (months)</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">74</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left">Amputation side</td>
<td valign="top" align="center">Right</td>
<td valign="top" align="center">Right</td>
<td valign="top" align="center">Left</td>
<td valign="top" align="center">Right</td>
<td valign="top" align="center">Left</td>
<td valign="top" align="center">Right</td>
<td valign="top" align="center">Right</td>
</tr>
<tr>
<td valign="top" align="left">Residual limb length (cm)<xref ref-type="table-fn" rid="t1fn1">&#x002A;<sup>1</sup></xref></td>
<td valign="top" align="center">34</td>
<td valign="top" align="center">35</td>
<td valign="top" align="center">37</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">37</td>
<td valign="top" align="center">30</td>
</tr>
<tr>
<td valign="top" align="left">Residual limb pain intensity<xref ref-type="table-fn" rid="t1fn2">&#x002A;<sup>2</sup></xref></td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
</tr>
<tr>
<td valign="top" align="left">Phantom limb sensation</td>
<td valign="top" align="center">No</td>
<td valign="top" align="center">Yes</td>
<td valign="top" align="center">Yes</td>
<td valign="top" align="center">No</td>
<td valign="top" align="center">Yes</td>
<td valign="top" align="center">Yes</td>
<td valign="top" align="center">No</td>
</tr>
<tr>
<td valign="top" align="left">Functional level<xref ref-type="table-fn" rid="t1fn3">&#x002A;<sup>3</sup></xref></td>
<td valign="top" align="center">34</td>
<td valign="top" align="center">34</td>
<td valign="top" align="center">37</td>
<td valign="top" align="center">37</td>
<td valign="top" align="center">39</td>
<td valign="top" align="center">27</td>
<td valign="top" align="center">30</td>
</tr>
<tr>
<td valign="top" align="left">Physical activity level<xref ref-type="table-fn" rid="t1fn4">&#x002A;<sup>4</sup></xref></td>
<td valign="top" align="center">High level</td>
<td valign="top" align="center">Low level</td>
<td valign="top" align="center">High level</td>
<td valign="top" align="center">High level</td>
<td valign="top" align="center">High level</td>
<td valign="top" align="center">High level</td>
<td valign="top" align="center">High level</td>
</tr>
<tr>
<td valign="top" align="left" colspan="8">&#x2006;<bold>Residual limb hip muscle strength (kg/F)<xref ref-type="table-fn" rid="t1fn5"><sup>&#x002A;5</sup></xref></bold></td>
</tr>
<tr>
<td valign="top" align="left">Flexors</td>
<td valign="top" align="center">29.2</td>
<td valign="top" align="center">17.5</td>
<td valign="top" align="center">14.2</td>
<td valign="top" align="center">16.6</td>
<td valign="top" align="center">14.7</td>
<td valign="top" align="center">15.7</td>
<td valign="top" align="center">17.0</td>
</tr>
<tr>
<td valign="top" align="left">Extensors</td>
<td valign="top" align="center">20.5</td>
<td valign="top" align="center">9.6</td>
<td valign="top" align="center">13.9</td>
<td valign="top" align="center">12.8</td>
<td valign="top" align="center">12.9</td>
<td valign="top" align="center">14.1</td>
<td valign="top" align="center">12.8</td>
</tr>
<tr>
<td valign="top" align="left">Abductors</td>
<td valign="top" align="center">14.8</td>
<td valign="top" align="center">11.4</td>
<td valign="top" align="center">12.2</td>
<td valign="top" align="center">17.2</td>
<td valign="top" align="center">11.3</td>
<td valign="top" align="center">13.7</td>
<td valign="top" align="center">12.4</td>
</tr>
<tr>
<td valign="top" align="left">Adductors</td>
<td valign="top" align="center">15.0</td>
<td valign="top" align="center">10.1</td>
<td valign="top" align="center">12.2</td>
<td valign="top" align="center">&#x2013;<xref ref-type="table-fn" rid="t1fn8"><sup>&#x002A;&#x002A;</sup></xref></td>
<td valign="top" align="center">9.9</td>
<td valign="top" align="center">11.3</td>
<td valign="top" align="center">10.8</td>
</tr>
<tr>
<td valign="top" align="left" colspan="8">&#x2006;<bold>Cognitive and psychological</bold></td>
</tr>
<tr>
<td valign="top" align="left">Cognitive level<xref ref-type="table-fn" rid="t1fn6"><sup>&#x002A;6</sup></xref></td>
<td valign="top" align="center">26</td>
<td valign="top" align="center">28</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">18</td>
</tr>
<tr>
<td valign="top" align="left">Depression level<xref ref-type="table-fn" rid="t1fn7"><sup>&#x002A;7</sup></xref></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">1</td>
</tr>
<tr>
<td valign="top" align="left">Anxiety level<xref ref-type="table-fn" rid="t1fn7"><sup>&#x002A;7</sup></xref></td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">3</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="t1fn1"><p>&#x002A;1 Residual limb measurement reference was made considering the distance from the greater trochanter of the femur to the distal extremity (<xref ref-type="bibr" rid="B56">Pedrinelli, 2004</xref>).</p></fn>
<fn id="t1fn2"><p>&#x002A;2 Numerical pain scale, where &#x2018;0&#x2019; indicates no pain and &#x2018;10&#x2019; indicates the worst pain (<xref ref-type="bibr" rid="B35">Jensen et al., 1986</xref>; <xref ref-type="bibr" rid="B30">Hawker et al., 2011</xref>).</p></fn>
<fn id="t1fn3"><p>&#x002A;3 The Amputee Mobility Predictor No Prosthesis (AMPnoPRO) assesses mobility aspects of amputees and predicts functional levels related to the use of prostheses (<xref ref-type="bibr" rid="B27">Gailey et al., 2002</xref>).</p></fn>
<fn id="t1fn4"><p>&#x002A;4 The International Physical Activity Questionnaire - short version (IPAQ) was used to assess the level of physical activity (<xref ref-type="bibr" rid="B49">Matsudo et al., 2001</xref>).</p></fn>
<fn id="t1fn5"><p>&#x002A;5 Measurement made using a digital dynamometer. The point of force application was considered the midpoint of the residual limb length. Three isometric contractions were performed for each muscle group, and the mean peak strength was calculated over the last 5 s of contraction (<xref ref-type="bibr" rid="B51">Mentiplay et al., 2015</xref>).</p></fn>
<fn id="t1fn6"><p>&#x002A;6 The Montreal Cognitive Assessment (MoCA) was used to assess cognitive functions (<xref ref-type="bibr" rid="B62">Sarmento, 2009</xref>).</p></fn>
<fn id="t1fn7"><p>&#x002A;7 The Hospital Anxiety and Depression (HAD) Scale was used to assess levels of anxiety and depression (<xref ref-type="bibr" rid="B10">Botega et al., 1995</xref>).</p></fn>
<fn id="t1fn8"><p><sup>&#x002A;&#x002A;</sup> For participant &#x201C;D&#x201D;, it was not possible to assess the strength of the adductor muscles due to the small size of the residual limb.</p></fn>
</table-wrap-foot>
</table-wrap>
<p><bold>Participant A:</bold> The limb was amputated on July 20, 2018, because of an accident involving a motorcycle and a truck. Due to trauma, there were multiple fractures and local infection, culminating in amputation.</p>
<p><bold>Participant B:</bold> The patient had a history of an accident involving a car and a motorcycle in 2011, resulting in a lower limb injury. He was bedridden for approximately 2 years and used an external fixator for fracture treatment. In December 2018, arthrodesis of the knee was performed. However, due to complications from osteomyelitis, amputation was performed on May 7, 2019.</p>
<p><bold>Participant C:</bold> The lower limb was amputated immediately after an accident involving a motorcycle and a truck on August 31, 2018.</p>
<p><bold>Participant D:</bold> Amputation was performed on June 13, 2013, due to extensive injury to the lower limb after an accident involving a motorcycle and a car. The patient also sustained a right forearm fracture during that same event.</p>
<p><bold>Participant E:</bold> Traumatic amputations occurred during an accident involving a motorcycle and a car on January 5, 2018: transfemoral in the lower limb and transradial in the left upper limb.</p>
<p><bold>Participant F:</bold> Surgical amputation was performed on September 18, 2018, after the patient was hit by a vehicle, resulting in crushing of the limb and vascular complications.</p>
<p><bold>Participant G:</bold> Amputation was performed on August 14, 2019, because of an accident between two motorcycles. In addition to the amputation, the patient had a fracture of the left femur that required a surgery for stabilization.</p>
</sec>
<sec id="S2.SS2">
<title>Intervention</title>
<sec id="S2.SS2.SSS1">
<title>EMG-based human-machine interface</title>
<p>The EMG-based HMI was designed to work using the electrophysiological activity of the muscle on the residual limb. Through a real-time recording and processing of this activity, the participants were able to control the knee movements of a virtual prosthesis while receiving patterns of vibrotactile stimulation on their back, representing the current position of the virtual prosthesis.</p>
</sec>
<sec id="S2.SS2.SSS2">
<title>Recording of muscle activity</title>
<p>The activity of the rectus femoris (hip flexor and knee extensor) and femoral biceps long head or semitendinosus muscles (hip extensors and knee flexors) (<xref ref-type="bibr" rid="B38">Kendall et al., 1995</xref>) on the residual limb was recorded using surface electromyography (EMG). Electrode placement for each muscle and participant was determined by applying excitomotor electrical current stimulation and visualizing the muscle contraction response. These positions were mapped for each person and used in all training sessions. Two channels of an Intan Technologies<sup>&#x00AE;</sup> chip were used to amplify the electrophysiological signals, and the chip was connected to the OpenEphys<sup>&#x00AE;</sup> analog-digital converter board in communication with its software (<xref ref-type="bibr" rid="B8">Black et al., 2017</xref>; <xref ref-type="bibr" rid="B68">Siegle et al., 2017</xref>). The electrophysiological signals were sampled at a rate of 10 kHz (<xref ref-type="fig" rid="F1">Figure 1A</xref>.1).</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption><p>EMG-based human-machine interface scheme. <bold>(A)</bold> Muscle activity recording through a surface EMG. <bold>(A.1)</bold> Illustration of the rectus femoris (RF) (hip flexor and knee extensor), femoral biceps long head (FB) and semitendinosus (ST) muscles (hip extensors and knee flexors) and positions of the surface electrodes on these muscles responsible for controlling the movements of the virtual prosthesis knee. <bold>(A.2)</bold> Schematic diagram of the real-time processing of electromyographic activity and root mean square (RMS) calculations to estimate the level of muscle contraction. The RMS was normalized to the maximum voluntary isometric contraction (MVIC) of each muscle. Regarding recognition of the movement direction, the activity of the agonist muscle should be twice as high as the average of the baseline signal, and the antagonist muscle could not exceed a threshold relative to the agonist, which was initially set at 80%. The recognized EMG patterns were mapped into visual and vibrotactile feedback. <bold>(B)</bold> Feedback. <bold>(B.1)</bold> Visual feedback. Avatar modeled with a transfemoral prosthesis and visualization from the first-person perspective are shown. The range of motion available to the prosthetic knee was set between 0&#x00B0; and 90&#x00B0;. <bold>(B.2)</bold> Vibrotactile feedback scheme. The positioning of vibrotactile actuators on the participant&#x2019;s back was organized in a 4 &#x00D7; 4 matrix. The paradigm for the applied vibratory stimuli was associated with the movements of the virtual prosthesis: upward vibration during knee extension and downward vibration during knee flexion. The vibratory intensity peak of a given row corresponded to a specific angle of knee movement (row A, 0&#x00B0;; B, 30&#x00B0;; C, 60&#x00B0;; and D, 90&#x00B0;), with an overlap of 30&#x00B0; between adjacent rows.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-870103-g001.tif"/>
</fig>
</sec>
<sec id="S2.SS2.SSS3">
<title>Real-time processing</title>
<p>Data were processed using MATLAB<sup>&#x00AE;</sup> (R2017b). For real-time control, every 200 ms, the EMG signals in each channel were loaded in blocks of 5120 samples, resulting in a 60% overlap with the previous sample (<xref ref-type="bibr" rid="B54">Moreira et al., 2021</xref>). The samples were filtered using a twentieth-order IIR bandpass filter in the frequency range from 10 to 500 Hz and filtered at 60 (&#x00B1;2) Hz with its harmonics (<xref ref-type="bibr" rid="B75">Stegeman and Hermens, 2007</xref>). Then, the EMG signal in each window was resampled to 2 kHz, and its root mean square (RMS) was calculated to estimate the muscle contraction level (<xref ref-type="bibr" rid="B74">Staudenmann et al., 2010</xref>).</p>
<p>To control the feedback, two criteria needed to be satisfied: (a) Agonist muscle activation threshold. The RMS of the agonist muscle signals had to be greater than 2 standard deviations (SD) in relation to the baseline signal for the system to recognize the direction of movement (knee extension or flexion). (b) Tolerance of antagonist muscle contraction. Initially, the RMS of the antagonist muscle could not exceed 80% in relation to the agonist muscle (this parametrization was also used as a criterion for the progression in difficulty levels during training). Therefore, a higher level of EMG activity associated with the hip flexor muscle resulted in the knee extension movement of the virtual prosthesis and, simultaneously, in an upward vibrotactile stimulation pattern on the subject&#x2019;s back. A higher level of EMG activity associated with the hip extensor muscle resulted in knee flexion of the virtual prosthesis and, simultaneously, in a downward vibrotactile stimulation pattern on the subject&#x2019;s back (<xref ref-type="fig" rid="F1">Figure 1A</xref>.2). For more details about the definition of the vibrotactile stimulation pattern, see <xref ref-type="supplementary-material" rid="DS1">Supplementary Figure 1</xref> in the <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref>.</p>
</sec>
<sec id="S2.SS2.SSS4">
<title>Virtual reality environment</title>
<p>The virtual environment was developed on the Unity3D<sup>&#x00AE;</sup> platform (2018.4). The environment was conceived to simulate a regular clinical room where the users could see themselves in a first-person perspective as a humanoid avatar using a transfemoral prosthesis in the corresponded lower limb. The subjects were able to control the knee extension and flexion movements of the prosthetic limb within a range between 0&#x00B0; and 90&#x00B0; (<xref ref-type="fig" rid="F1">Figure 1B</xref>.1). Moreover, the virtual environment was designed to enable gamification of the protocol with different stages and motivational messages to reinforce learning. The participants accessed the virtual environment using a Samsung<sup>&#x00AE;</sup> Odyssey Oculus Head-Mounted Display that provided a first-person view in a fixed sitting position (<xref ref-type="bibr" rid="B72">Slater et al., 2009</xref>, <xref ref-type="bibr" rid="B73">2010</xref>) and the ability to visually explore the whole 3D virtual environment. For more details about VR environment see <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref> (<xref ref-type="supplementary-material" rid="DS1">Supplementary Table 2</xref>).</p>
</sec>
<sec id="S2.SS2.SSS5">
<title>Vibrotactile stimulation device</title>
<p>A total of 16 vibrotactile actuators (10 mm &#x00D7; 6 mm; 5 V-DC) were assembled in a 4 &#x00D7; 4 matrix and positioned on the subject&#x2019;s back (<xref ref-type="bibr" rid="B36">Jones et al., 2009</xref>), with an average distance of 6 cm among them. Vibrotactile stimulation was applied at frequencies between 260 and 330 Hz, which is optimal for stimulating Pacinian corpuscles, the main skin vibration receptors (<xref ref-type="bibr" rid="B37">Kandel et al., 2014</xref>). Vibrotactile actuators were arranged in groups of 4 (organized by rows on the back), and each group was activated together (actuator activation was performed through an Arduino<sup>&#x00AE;</sup> platform communicating in real time with MATLAB<sup>&#x00AE;</sup> (R2017b)). All actuators placed along the same row vibrated with the same intensity, with maximum intensity when the virtual prosthesis was positioned at a specific movement angle (0&#x00B0;, 30&#x00B0;, 60&#x00B0;, or 90&#x00B0;); there was a vibratory overlap of 30&#x00B0; with the adjacent rows to produce a continuity effect on vibratory perception (<xref ref-type="bibr" rid="B54">Moreira et al., 2021</xref>; <xref ref-type="fig" rid="F1">Figure 1B</xref>.2). For more details on the vibrotactile stimulation device see <xref ref-type="supplementary-material" rid="DS1">Supplementary Table 3</xref> and <xref ref-type="supplementary-material" rid="DS1">Supplementary Figure 2</xref> in the <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref>.</p>
</sec>
<sec id="S2.SS2.SSS6">
<title>Training protocol</title>
<p>Two preliminary sessions were conducted prior to the start of the training protocol to familiarize participants with the EMG-based HMI. In these sessions, the participants learned to associate the residual limb muscular contraction with virtual prosthesis movements (for details, see <xref ref-type="supplementary-material" rid="DS1">Supplementary Figure 3</xref> in the <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref>). After this stage, the training was based on an operant conditioning paradigm, in which there was a progressive increase in the difficulty of the tasks with contingent feedback and rewards to reinforce learning. Overall, contingent feedback itself has a positively reinforcing effect, but this was supplemented with motivational messages, such as &#x201C;congratulations,&#x201D; at the end of each task block (<xref ref-type="bibr" rid="B71">Skinner, 1938</xref>; <xref ref-type="bibr" rid="B37">Kandel et al., 2014</xref>).</p>
<p>In total, six training sessions lasting 30 mins each, consisting of task blocks involving motor control, were conducted twice a week. The maximum number of task blocks was performed within the 30 mins. For each task, the participants moved the virtual prosthesis until they reached a specific predefined position set at four target angles: 0&#x00B0;, 30&#x00B0;, 60&#x00B0;, or 90&#x00B0; (a combination of angles with targets at 0&#x00B0;, 45&#x00B0;, and 90&#x00B0; was also used as a preliminary stage for each new level of difficulty). To guide the movements in real time, the participants were presented with a visual clue (semicircular ruler) indicating the position to which they should move the virtual prosthesis (<xref ref-type="fig" rid="F2">Figure 2A</xref>).</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption><p>Training protocol with the EMG-based HMI scheme. <bold>(A)</bold> Training protocol diagram. Feedback within the virtual environment consisted of visual clues indicating the target angles that the participants had to reproduce. The target angles used were 0&#x00B0;, 30&#x00B0;, 60&#x00B0;, and 90&#x00B0;. Each angle was randomly presented four times during each task block (the participant had 20 s to establish each target angle). In addition to visual feedback, the participants received concomitant vibrotactile feedback on their back. The training sessions lasted 30 mins, and within that time, as many task blocks as possible were performed. <bold>(B)</bold> Difficulty of progression. Two criteria were adopted to increase the difficulty: (i) Tolerance of antagonist muscle contraction. Initially, the antagonist muscle could have up to 80% activation in comparison to the agonist muscle. The tolerance decreased progressively by 10% at each new difficulty level (the lower the tolerance was, the greater the need to isolate the agonist muscle contraction). (ii) Precision movement. To evaluate whether a target angle has been reached, different ranges of prosthesis position, in relation to the target angle, were adopted (15&#x00B0;, 10&#x00B0;, and 5&#x00B0;: the lower the range was, the greater the necessary precision of movement). Given a tolerance of antagonist muscle contraction, the different precision difficulties were progressively combined. If the participant had a success rate &#x2265; 75% on a task block with a certain combination of difficulties, the next block instituted a new combination of difficulties.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-870103-g002.tif"/>
</fig>
<p>The following criteria were adopted to increase the task difficulty: (a) Tolerance of antagonist muscle contraction. Antagonist muscle activation up to 80% in relation to agonist was initially established, which decreased by 10% at each new difficulty level; (b) Precision of movement. For a task to be considered correctly performed, a range of positions was adopted in relation to the target angle. The difficulty levels varied from the target angle as follows: &#x00B1;15&#x00B0;, &#x00B1;10&#x00B0;, and &#x00B1; 5&#x00B0;. Therefore, initially, there was no need for refined muscle control (regarding the isolation of agonist muscle contraction) and movement precision. However, this became necessary as the difficulty gradually increased (<xref ref-type="fig" rid="F2">Figure 2B</xref>).</p>
<p>In this manner, given a particular difficulty combination (tolerance of antagonist muscle contraction and precision of movement), the participants performed a preliminary block and then a task block composed of a set of target angles, 0&#x00B0;, 30&#x00B0;, 60&#x00B0;, and 90&#x00B0; (each presented randomly four times), for a total of sixteen tasks for each block. After an attempt of 20 s, or if the target angle was hit, the next task was presented (if the participant did not hit the task within 20 s, it was considered a failure, although the participant did not receive any messages indicating the failure). The performance was assessed at the end of the task block, and the difficulty was increased if the participant had a success rate of 75% (this cutoff was heuristically calculated from previous pilot studies) or more; otherwise, the same difficulty combination was performed again.</p>
</sec>
</sec>
<sec id="S2.SS3">
<title>Embodiment assessment</title>
<p>We assessed a set of measurements to examine the induction and enhancement of virtual prosthesis embodiment. This test set was selected based on affective, spatial perception, and motor mechanisms. These three features were proposed by <xref ref-type="bibr" rid="B21">De Vignemont (2011)</xref> and underlie the development of the object&#x2019;s embodiment. In addition, we investigated self-perception regarding the sense of ownership and agency. Affective, spatial perception and self-perception measurements were assessed at the beginning and end of the experimental protocol. Motor measurement was performed in all training sessions.</p>
<sec id="S2.SS3.SSS1">
<title>Affective measurement</title>
<p>Skin conductance response (SCR) was used to detect inherent physiological responses when the virtual prosthesis was threatened (<xref ref-type="bibr" rid="B19">Critchley, 2002</xref>; <xref ref-type="bibr" rid="B1">Alimardani et al., 2016</xref>). SCR acquisition was accomplished using the e-Health<sup>&#x00AE;</sup> (2.0) system coupled to an Arduino Uno<sup>&#x00AE;</sup>, with a sampling rate of 20 Hz. The SCR recording was performed at the initial session and at the penultimate training session; for this, surface electrodes (Ag/AgCl) were placed on the intermediate phalanx of the second and third left hand fingers (<xref ref-type="bibr" rid="B24">Ehrsson et al., 2008</xref>). This recording was made 2 mins before and during the simulation of a threat&#x2212;a chandelier falling on the virtual prosthesis (<xref ref-type="bibr" rid="B85">Yuan and Steed, 2010</xref>). At the beginning of the training sessions, all participants watched a video showing the fall of the chandelier on the virtual prosthesis, and they were informed that at some point during the sessions, the same event could occur, thereby minimizing the effects of surprise on the measurements (<xref ref-type="bibr" rid="B1">Alimardani et al., 2016</xref>). The participants did not know on which day this test would be conducted. Finally, the magnitude of the SCR was analyzed (<xref ref-type="bibr" rid="B12">Braithwaite et al., 2015</xref>; <xref ref-type="fig" rid="F3">Figure 3A</xref>).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption><p>Embodiment assessment. <bold>(A)</bold> Affective measurement&#x2013;Skin conductance response. Two surface electrodes were placed on the intermediate phalanges of the second and third left hand fingers, and the SCR was recorded once a chandelier dropped on the virtual prosthesis, representing a threatening stimulus. <bold>(B)</bold> Spatial perception measurement&#x2013;Crossmodal congruency task (CCT). During the CCT, visual stimuli were applied within the VR environment close to the avatar&#x2019;s feet (close to the hallux or heel) soon after the appearance of the visual distractor, and a vibratory stimulus was applied on the participant&#x2019;s back (thoracic or lumbar). The CCT was composed of sixteen different combinations of visual and vibrotactile stimuli, each presented four times at random, for a total of sixty-four trials. The participants were instructed to press a button corresponding to the location on their back where they received the vibratory stimulation as quickly as possible while ignoring the visual distractor. <bold>(C)</bold> Motor measurement. The participants moved the virtual prosthesis until they reached a specific predetermined position set at four target angles: 0&#x00B0;, 30&#x00B0;, 60&#x00B0;, or 90&#x00B0;. The participants&#x2019; performances, execution time and success rates during the training were used to assess their ability to control the virtual prosthesis. <bold>(D)</bold> Self-perception. The participants quantified on a scale from 0 to 10, where 0 indicated &#x201C;none&#x201D; and 10 indicated &#x201C;totally,&#x201D; how much they felt the virtual prosthesis was part of their own body and how much they felt that they could control it.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-870103-g003.tif"/>
</fig>
</sec>
<sec id="S2.SS3.SSS2">
<title>Spatial perception measurement</title>
<p>A crossmodal congruency task (CCT) involving a visual stimulus (visual distractors on the virtual body) and a concomitant tactile stimulus (vibratory stimulation of the participant&#x2019;s back) and the respective crossmodal congruency effect (CCE) were used to identify visuotactile interference in the peripersonal space (<xref ref-type="bibr" rid="B65">Seng&#x00FC;l et al., 2012</xref>; <xref ref-type="bibr" rid="B47">Marini et al., 2014</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>). To perform this task, the participants visualized the lower limbs of the avatar and a luminous point (visual distractor) in four different positions: on either side of the hallux or heel. In addition, four vibrotactile actuators were positioned on the participants&#x2019; backs: two were placed in the thoracic region, and two were placed in the lumbar region on both sides. Therefore, there were 16 possible stimulus combinations: 4 positions of the visual distractors and 4 positions related to the vibratory stimuli, and each combination was randomly presented four times for a total of 64 repetitions in each task block. A visual distractor was presented and followed 100 ms later by vibrating stimulation for 350 ms. The participants were then instructed to press a button based on the place on their back that they had received the vibratory stimulation while ignoring the visual distractor. They had two options: upper (thoracic) or lower (lumbar). If the participant did not press the button within 2 s, the next combination was presented. The CCT protocol consisted of observing the virtual prosthesis performing knee flexion and extension movement (at an angular speed of 45&#x00B0;/s for 1 min) with or without concomitant vibratory stimulation related to virtual prosthesis movements. This observation sequence was random, and the CCT task block was performed after each paradigm. 
All participants previously underwent training and started this task only after reaching an accuracy of 85% in localizing the vibratory stimulus. In this manner, the CCE was calculated as the difference in the reaction time between incongruent (for instance, when a visual distractor was localized on the upper part of the foot and the vibratory stimulation was in the lumbar region) and congruent conditions (for instance, when a visual distractor was localized on the upper part of the foot and the vibratory stimulation was in the thoracic region) (<xref ref-type="bibr" rid="B46">Maravita et al., 2003</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>; <xref ref-type="fig" rid="F3">Figure 3B</xref> and <xref ref-type="supplementary-material" rid="DS1">Supplementary Figure 4</xref> in the <xref ref-type="supplementary-material" rid="DS1">Supplementary Material</xref>).</p>
</sec>
<sec id="S2.SS3.SSS3">
<title>Motor measurement</title>
<p>The participants&#x2019; performances, execution time and success rates, during the training were used to assess their ability to control the virtual prosthesis, considering the different levels of difficulty during the tasks, the tolerance for antagonist muscle contraction and the precision of movement (<xref ref-type="fig" rid="F3">Figure 3C</xref>).</p>
</sec>
<sec id="S2.SS3.SSS4">
<title>Self-perception</title>
<p>The participants quantified on a scale from 0 to 10, where 0 indicated &#x201C;none&#x201D; and 10 indicated &#x201C;totally,&#x201D; how much they felt the virtual prosthesis was part of their own body and how much they felt that they could control it (<xref ref-type="bibr" rid="B3">Armel and Ramachandran, 2003</xref>; <xref ref-type="fig" rid="F3">Figure 3D</xref>).</p>
</sec>
</sec>
<sec id="S2.SS4">
<title>Data analysis</title>
<p>The data analyses and electrophysiological signal analyses were performed in MATLAB<sup>&#x00AE;</sup> (R2017b). Parametric or non-parametric hypothesis tests were used based on the classification of the Kolmogorov-Smirnov test (<xref ref-type="bibr" rid="B53">Mohd Razali and Bee Wah, 2011</xref>). Differences were considered significant when <italic>p &#x003C;</italic> &#x03B1;, where &#x03B1; = 0.05.</p>
<sec id="S2.SS4.SSS1">
<title>Affective measurement</title>
<p>To compare SCR magnitudes among the 4 different periods (before and after the threat, at the beginning and end of the experimental protocol) a two-way ANOVA was used with a Tukey-Kramer <italic>post hoc</italic> correction. A one-way MANOVA was applied followed by canonical discriminant analysis to determine whether the set of variables (SCR amplitude waveforms) exhibited specific clusters based on each period of threat exposure. For the SCR signal analysis, the following steps were performed: (a) smoothing the original x(t) signal by averaging it over a 3-s sliding window with 50% overlap along the whole signal and producing a x&#x2032;(t) signal; (b) calculating the phase signal from the difference y(t) = x(t)&#x2212;x&#x2032;(t); and (c) applying a logarithmic scale over the magnitude of the signals and considering the 3 s of signal before and 3 s after the application of the visual stimulus (i.e., the moment when the chandelier enters the visual field of the participant within the VR environment) (<xref ref-type="bibr" rid="B12">Braithwaite et al., 2015</xref>). The SCR signals from participant &#x201C;B&#x201D; were excluded from the analysis due to noise issues during registration.</p>
</sec>
<sec id="S2.SS4.SSS2">
<title>Spatial perception measurement</title>
<p>Statistical comparisons were performed between CCE averages considering that the visual and tactile stimuli were applied on the same and the opposite side. Thus, a two-way ANOVA with Tukey-Kramer <italic>post hoc</italic> correction was applied for this comparison. For the CCE calculations, the only data that were included were from correct executions, while times greater than 1500 ms or less than 200 ms were excluded (3.4% of all trials) (<xref ref-type="bibr" rid="B65">Seng&#x00FC;l et al., 2012</xref>). The prior visualization of the virtual prosthesis movements with and without associated vibrotactile stimulation were both considered statistical factors at the beginning and end of the protocol.</p>
<p>To evaluate the relationships between CCE and SCR measures, Pearson&#x2019;s correlation coefficients were calculated together with the description of the respective linear regressions for each variable pair. Only the coefficients with <italic>p &#x003C;</italic> &#x03B1;, tested with a <italic>t</italic>-test, were considered significant.</p>
</sec>
<sec id="S2.SS4.SSS3">
<title>Motor measurement</title>
<p>The following variables were analyzed: (a) execution time and (b) success rate. The target angles were separated into intermediate (30&#x00B0; and 60&#x00B0;) and extreme (0&#x00B0; and 90&#x00B0;) angles. In addition, the data were grouped into three difficulty levels related to the precision of movements, i.e., 15&#x00B0;, 10&#x00B0;, and 5&#x00B0; to the target angle. The task execution times were compared among these three difficulty levels while separately considering the intermediate and extreme target angles (Kruskal&#x2212;Wallis test followed by Tukey-Kramer <italic>post hoc</italic> correction). Comparisons among execution times, while considering the intermediate and extreme target angles, were also performed (using the Mann&#x2212;Whitney test), as well as the analysis of success rates by comparing among average proportions and confidence interval (CI) (95%).</p>
</sec>
<sec id="S2.SS4.SSS4">
<title>Self-perception</title>
<p>Presentation of the absolute values reported by each participant (from 0 to 10) at the beginning and at the end of the protocol for the ownership and agency sense.</p>
</sec>
</sec>
</sec>
<sec id="S3" sec-type="results">
<title>Results</title>
<p>Virtual prosthesis embodiment and enhancement through the training protocol using EMG-based HMI was consistently observed in different analyses.</p>
<sec id="S3.SS1">
<title>Affective measurement</title>
<p>All participants reacted affectively to a threat to the virtual prosthesis (inside the virtual environment, a chandelier falling over the prosthesis). The affective response was indicated by a significant increase in SCR after the threatening event (<italic>F</italic> = 53.3, <italic>p &#x003C;</italic> 0.001), both at the beginning (<italic>post hoc p &#x003C;</italic> 0.001&#x2212;before and after the threat) and at the end of the experimental protocol (<italic>post hoc p &#x003C;</italic> 0.001&#x2212;before and after the threat). At the end, the response had a greater magnitude compared to the beginning (<italic>F</italic> = 85.15, <italic>p &#x003C;</italic> 0.001; <italic>post hoc p &#x003C;</italic> 0.001&#x2212;before the threat at the end and beginning; after the threat at the end and beginning) (<xref ref-type="fig" rid="F4">Figure 4A</xref>).</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption><p>Affective, spatial perception measurements, and self-perception. <bold>(A)</bold> Skin conductance response (SCR) to a threat to the virtual prosthesis. <bold>(A.1)</bold> Two-way ANOVA with Tukey-Kramer correction. <bold>(A.2)</bold> Application of one-way MANOVA followed by canonical discriminant analysis. <bold>(B)</bold> Crossmodal congruency task (CCT) and crossmodal congruency effect (CCE) (two-way ANOVA with Tukey-Kramer correction). Comparison for stimuli applied on the same side (SS) and opposite side (OS). <bold>(C)</bold> Self-perception (absolute values quantified by the participants). <bold>(C.1)</bold> Sense of ownership. <bold>(C.2)</bold> Sense of agency. <bold>(D)</bold> Correlations between the SCR and CCE results (Pearson&#x2019;s correlation coefficients). &#x002A;<italic>p &#x003C;</italic> 0.05.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-870103-g004.tif"/>
</fig>
</sec>
<sec id="S3.SS2">
<title>Spatial perception measurement</title>
<p>There was a significant increase in CCE for stimuli applied to the same side of the body compared with stimuli applied to opposite sides, at the end of the training (<italic>F</italic> = 7.7, <italic>p</italic> = 0.010, <italic>post hoc</italic> for the preview statistical factors VR only and VR-VT <italic>p</italic> = 0.046). This indicated that, at the end of training, the visual stimuli applied in the virtual environment were considered close to the real body. No difference in stimulus application between sides was found in CCE at the beginning (<italic>F</italic> = 0.06, <italic>p</italic> = 0.798) (<xref ref-type="fig" rid="F4">Figure 4B</xref>).</p>
<p>Furthermore, there was a significant correlation between the mean SCR and CCE values (stimuli applied on the same side) at the beginning and end of the training (beginning <italic>r</italic> = 0.82, <italic>p</italic> = 0.047; final <italic>r</italic> = 0.90, <italic>p</italic> = 0.014) (<xref ref-type="fig" rid="F4">Figure 4D</xref>).</p>
</sec>
<sec id="S3.SS3">
<title>Motor measurement</title>
<p>Motor training with the EMG-based HMI provided an improvement in the ability to control the virtual prosthesis, considering that there was a success rate &#x003E; 75%, even with the progressive increase in the difficulty of the tasks (<xref ref-type="fig" rid="F5">Figure 5C</xref>). However, although the success rate was always high, the execution time was longer in the more difficult/complex conditions. With intermediate angles (30&#x00B0; and 60&#x00B0;), the time to execute the tasks was longer than with extreme target angles (0&#x00B0; and 90&#x00B0;) regardless of the precision of movement required during the task (precision of movement 15&#x00B0;: <italic>U</italic> = 263, <italic>p</italic> = 0.006; 10&#x00B0;: <italic>U</italic> = 288, <italic>p &#x003C;</italic> 0.001; 5&#x00B0;: <italic>U</italic> = 301, <italic>p &#x003C;</italic> 0.001) (<xref ref-type="fig" rid="F5">Figure 5A</xref>). The difficulty associated with greater movement precision (5&#x00B0; range in relation to the target angle) also demanded significantly more time for task execution (H = 18.038, <italic>p &#x003C;</italic> 0.001; <italic>post hoc p &#x003C;</italic> 0.001 &#x2013; 5&#x00B0; and 15&#x00B0;; <italic>p</italic> = 0.019 &#x2212; 5&#x00B0; and 10&#x00B0;) (<xref ref-type="fig" rid="F5">Figure 5B</xref>).</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption><p>Motor measurement. <bold>(A)</bold> Comparison of execution times between the intermediate (30&#x00B0; and 60&#x00B0;) and extreme (0&#x00B0; and 90&#x00B0;) target angles (Mann&#x2013;Whitney test). <bold>(B)</bold> Comparison of execution times across levels of difficulty related to the precision of movement (15&#x00B0;, 10&#x00B0;, and 5&#x00B0; of variation in relation to the target angle) for the intermediate and extreme angles (Kruskal&#x2013;Wallis with Tukey&#x2013;Kramer correction). <bold>(C)</bold> Success rate on tasks involving intermediate and extreme target angles at each level of required movement precision (average of the proportion and CI). The red line indicates a success rate of 75%, and the blue line indicates 100%. &#x002A;<italic>p &#x003C;</italic> 0.05.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnhum-16-870103-g005.tif"/>
</fig>
</sec>
<sec id="S3.SS4">
<title>Self-perception</title>
<p>High self-perception scores (&#x2265;7) regarding the sense of ownership and agency of the virtual prosthesis were reported by most participants from the beginning of the training. The scores were increased or maintained throughout the protocol, except for two participants, &#x201C;C&#x201D; and &#x201C;D,&#x201D; who reported a decrease in the sense of agency at the end of the training (<xref ref-type="fig" rid="F4">Figure 4C</xref>).</p>
</sec>
</sec>
<sec id="S4" sec-type="discussion">
<title>Discussion</title>
<p>The results of this study showed that there was induction and enhancement of virtual prosthesis embodiment through training with an EMG-based HMI. We observed that the affective response was immediate, but with training, there was an amplification of this response. These findings, along with the recalibration of the peripersonal space and the increased control capacity with training, showed an improvement in the embodiment over time. The high indices of self-perception declared by the subjects regarding their sense of ownership and agency over the virtual prosthesis also corroborated this.</p>
<sec id="S4.SS1">
<title>Affective measurement</title>
<p>All participants reacted affectively to a threat to the virtual prosthesis, indicated by a significant increase in SCR magnitude, which is a natural physiological reaction to a threat to the subject&#x2019;s own bodies (<xref ref-type="bibr" rid="B19">Critchley, 2002</xref>; <xref ref-type="bibr" rid="B5">Bach et al., 2009</xref>; <xref ref-type="bibr" rid="B12">Braithwaite et al., 2015</xref>). This response already occurred at the beginning of the protocol, but there was a significant increase at the end, indicating an amplification. Other studies with manipulations of body perception, such as those based on the rubber hand illusion (RHI) paradigm (<xref ref-type="bibr" rid="B11">Botvinick and Cohen, 1998</xref>), have also identified an increase in SCR by threatening an external object, indicating embodiment (<xref ref-type="bibr" rid="B3">Armel and Ramachandran, 2003</xref>; <xref ref-type="bibr" rid="B24">Ehrsson et al., 2008</xref>; <xref ref-type="bibr" rid="B1">Alimardani et al., 2016</xref>). In the present study, the increase in SCR magnitude at the beginning of the protocol, suggested that there must be different levels of embodiment, since a few minutes of training to control the virtual prosthesis movements was enough to achieve some embodiment. An explanation for this immediate response may be associated with virtual environment immersion: with no visual feedback from their own body and only a visualization of the virtual body from the first-person perspective, there is a decreased incompatibility between real and virtual body perception in terms of visual, proprioceptive and spatial recognition (<xref ref-type="bibr" rid="B76">Tieri et al., 2015</xref>; <xref ref-type="bibr" rid="B15">Burin et al., 2019</xref>). 
This idea is also supported by previous studies that point out that this anatomical congruence between the body itself and an intact virtual limb is sufficient to induce embodiment, even without visuomotor or visuotactile stimulation (<xref ref-type="bibr" rid="B76">Tieri et al., 2015</xref>; <xref ref-type="bibr" rid="B26">Fusaro et al., 2016</xref>; <xref ref-type="bibr" rid="B55">Pavone et al., 2016</xref>).</p>
</sec>
<sec id="S4.SS2">
<title>Spatial perception measurement</title>
<p>At the end of the training, higher CCE values were obtained when the stimuli were applied on the same side of the body than on the opposite sides, indicating that the visual stimuli applied in the virtual environment were considered close to the real body. In other words, there was a recalibration of the peripersonal space to include the virtual prosthesis (<xref ref-type="bibr" rid="B46">Maravita et al., 2003</xref>; <xref ref-type="bibr" rid="B45">Maravita and Iriki, 2004</xref>; <xref ref-type="bibr" rid="B80">Van Elk et al., 2013</xref>). Although the size of our limbs determines our reach space, the use of tools can alter peripersonal space (<xref ref-type="bibr" rid="B40">L&#x00E0;davas, 2002</xref>; <xref ref-type="bibr" rid="B46">Maravita et al., 2003</xref>; <xref ref-type="bibr" rid="B45">Maravita and Iriki, 2004</xref>)&#x2212;a neurocognitive representation produced from the integration of sensory information related to the body itself and the space around it (<xref ref-type="bibr" rid="B32">Holmes and Spence, 2004</xref>). Other studies using immersion in virtual environments have also shown that it is possible to extend the peripersonal space to include a tool or virtual limb in an equivalent manner to what is produced in physical environments (<xref ref-type="bibr" rid="B65">Seng&#x00FC;l et al., 2012</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>). The absence of significant CCE differences at the beginning of the training indicated that the recalibration of the peripersonal space is not immediate and depends on exposure/training, unlike the autonomic/affective response. In the same line of interpretation, <xref ref-type="bibr" rid="B47">Marini et al. (2014)</xref>, in an experiment using a functional prosthesis, observed that the recalibration of the peripersonal space occurred only after a long period of training. 
Other studies have also pointed out that the stable recalibration of the peripersonal space depends on the development of skills and prolonged use of a tool or assistive device (<xref ref-type="bibr" rid="B66">Serino et al., 2007</xref>; <xref ref-type="bibr" rid="B6">Bassolino et al., 2010</xref>).</p>
<p>Considering all this together, our interpretation is that the autonomic/affective response is dependent on the visual and proprioceptive congruence of the real and virtual body experienced through the first-person perspective (<xref ref-type="bibr" rid="B76">Tieri et al., 2015</xref>; <xref ref-type="bibr" rid="B26">Fusaro et al., 2016</xref>; <xref ref-type="bibr" rid="B55">Pavone et al., 2016</xref>). The recalibration of the peripersonal space may be linked to the processing of body perception, depending on the motor skills learned during the training sessions (<xref ref-type="bibr" rid="B66">Serino et al., 2007</xref>; <xref ref-type="bibr" rid="B6">Bassolino et al., 2010</xref>; <xref ref-type="bibr" rid="B47">Marini et al., 2014</xref>). However, the increase in SCR magnitude and high correlation with the CCE at the end of the protocol indicated that the affective response, although it was immediate, was also strengthened during the learning process, suggesting that the embodiment can have different levels of intensity.</p>
</sec>
<sec id="S4.SS3">
<title>Motor measurement</title>
<p>An increase in the ability to control the virtual prosthesis was verified by performance analyses during training with the EMG-based HMI, as indicated by the high success rates at all levels of difficulty. Thus, it can be concluded that the participants were able to use visual and vibrotactile feedback for motor planning and execution in the control of the virtual prosthesis movements (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>).</p>
<p>The time required for the participants to perform the tasks was longer for the intermediate target angles (30&#x00B0; and 60&#x00B0;) than for the extremities (0&#x00B0; and 90&#x00B0;). For the intermediate angles, the time was even greater when the task required greater precision. The differences in these times can be explained by the level of complexity of the motor control strategies: simpler strategies in the case of extremity angles and more complex strategies for reproducing intermediate target angles, especially in more precise tasks.</p>
<p>This interpretation can be supported by motor control theories based on feedforward and feedback mechanisms (<xref ref-type="bibr" rid="B84">Wolpert et al., 1995</xref>; <xref ref-type="bibr" rid="B82">Wolpert and Ghahramani, 2000</xref>). In conditions where movement strategies were simpler, motor control occurred largely through feedforward mechanisms from the estimation of sensory consequences using copies of the efferent motor commands. In this way, for the extreme angles, the execution times were shorter because the predicted movements did not require major corrections during the execution. However, during tasks with more complex motor control strategies, those with intermediate target angles and higher precision, motor control occurred mainly through the sensory feedback by comparing predicted and actual movements (<xref ref-type="bibr" rid="B84">Wolpert et al., 1995</xref>, <xref ref-type="bibr" rid="B83">2011</xref>; <xref ref-type="bibr" rid="B82">Wolpert and Ghahramani, 2000</xref>). In these cases, corrections and adjustments of the movement in real time were determinant and explained the longer execution times during these tasks.</p>
</sec>
<sec id="S4.SS4">
<title>Self-perception</title>
<p>Participants reported high self-perception that the virtual prosthesis was part of their own body and that they could voluntarily control it. This perception remained stable or increased over the course of the training in most cases. Only two participants (C and D) reported a decreased sense of agency at the end of the protocol. However, for both, the score given in the initial evaluation for the agency sense was already the maximum value. Most likely, this result was related to the expectations created by these participants that control would be easier throughout the sessions, which did not occur due to the progressive increase in difficulty imposed during training. Additionally, it is worth noting that this effect did not affect their sense of ownership since both reported an increase in ownership at the end of the training, which reinforces this interpretation.</p>
<p>The reports of some participants who felt the phantom limb were also interesting and corroborate the self-perception of ownership and agency over the virtual prosthesis: participant C reported that at the end of the protocol he could control the movements of the phantom limb, which he could not do before. Participant E had control over the movements of the phantom limb from the beginning and reported that he used the same strategy to flex the phantom limb to control the knee flexion movement of the virtual prosthesis. Participant F felt the phantom limb in constant flexion and could not move it. However, during the protocol in the immersive VR environment, she could actively flex the phantom limb together with the virtual prosthesis movement (&#x201C;It&#x2019;s like I have two legs moving&#x201D;).</p>
<p>Finally, there are two main points of our work that should be highlighted: (a) system and protocol and (b) embodiment investigation.</p>
</sec>
<sec id="S4.SS5">
<title>System and protocol</title>
<p>The critical difference in the proposed protocol is the combination of components and strategies aimed toward achieving embodiment, in this case, of the virtual prosthesis. These strategies include myoelectric control, immersion in a VR environment and vibrotactile stimulation.</p>
<p>Visual feedback through immersion in a VR environment was chosen based on the results of previous studies that have shown promising effects in a variety of clinical contexts (<xref ref-type="bibr" rid="B9">Bohil et al., 2011</xref>; <xref ref-type="bibr" rid="B29">Gumma and Youssef, 2019</xref>; <xref ref-type="bibr" rid="B39">Kluger et al., 2019</xref>; <xref ref-type="bibr" rid="B59">Qian et al., 2020</xref>) and in the induction of the embodiment of a body, limb or virtual object (<xref ref-type="bibr" rid="B72">Slater et al., 2009</xref>, <xref ref-type="bibr" rid="B73">2010</xref>; <xref ref-type="bibr" rid="B65">Seng&#x00FC;l et al., 2012</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>).</p>
<p>Regarding vibrotactile stimulation, we propose using vibrotactile feedback to represent movement (<xref ref-type="bibr" rid="B36">Jones et al., 2009</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>). Most current lower limb prostheses do not provide sensory feedback, which makes the user largely dependent on vision to determine the prosthetic limb position and its interaction with the environment. Furthermore, reestablishing proprioceptive sensory information is crucial for the development of embodiment (<xref ref-type="bibr" rid="B58">Proske and Gandevia, 2012</xref>; <xref ref-type="bibr" rid="B16">Butler et al., 2017</xref>) and improvement of motor control (<xref ref-type="bibr" rid="B61">Riemann and Lephart, 2002</xref>; <xref ref-type="bibr" rid="B58">Proske and Gandevia, 2012</xref>).</p>
<p>Last, although prosthetic myoelectric control has been widely explored in research and clinical environments (<xref ref-type="bibr" rid="B48">Maruishi et al., 2004</xref>; <xref ref-type="bibr" rid="B64">Sebelius et al., 2005</xref>; <xref ref-type="bibr" rid="B34">Jackson, 2008</xref>; <xref ref-type="bibr" rid="B39">Kluger et al., 2019</xref>; <xref ref-type="bibr" rid="B69">Sime, 2019</xref>), in general, these studies did not use an immersive virtual environment and focused on control conditions rather than the closed loop between control and feedback, as we propose in this work.</p>
</sec>
<sec id="S4.SS6">
<title>Embodiment investigation</title>
<p>The embodiment of an external object is a complex concept and experience. Currently, the literature shows an overlap of terms and definitions (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>; <xref ref-type="bibr" rid="B86">Zbinden et al., 2022</xref>). Here we based our protocol mostly on the definition provided by <xref ref-type="bibr" rid="B21">De Vignemont (2011)</xref>; <xref ref-type="bibr" rid="B43">Makin et al. (2017)</xref>, where there are more practical aspects to be implemented in the therapeutic context: &#x201C;the ability to process properties of this object at the sensory, motor and/or affective levels in the same way that the properties of one&#x2019;s own body parts.&#x201D; This definition is interesting because it inherently brings an ecological and interactive perspective, where the embodiment of an external object can only be achieved if the subjects systematically interact with the environment (including the object itself) through specific sensorimotor criteria.</p>
<p>There are studies that have investigated embodiment through psychophysical tests (<xref ref-type="bibr" rid="B66">Serino et al., 2007</xref>; <xref ref-type="bibr" rid="B47">Marini et al., 2014</xref>; <xref ref-type="bibr" rid="B67">Shokur et al., 2016</xref>), reports and/or electrophysiological activity (<xref ref-type="bibr" rid="B3">Armel and Ramachandran, 2003</xref>; <xref ref-type="bibr" rid="B25">Ehrsson et al., 2007</xref>, <xref ref-type="bibr" rid="B24">2008</xref>). However, we claim that since different mechanisms are underlying and influencing the induction of embodiment (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>; <xref ref-type="bibr" rid="B86">Zbinden et al., 2022</xref>), multiple tasks and measurements are required to cover all embodiment dimensions.</p>
<p>This perspective converges with the different aspects and contexts of embodiment reported in the literature, such as quick illusions in experiments of visuotactile congruence (RHI&#x2212;synchronous tactile stimulation between a rubber hand that is within the visual field and the real hand, which is hidden) (<xref ref-type="bibr" rid="B11">Botvinick and Cohen, 1998</xref>) or with visuomotor congruence, in studies involving virtual reality in voluntary control paradigms (<xref ref-type="bibr" rid="B18">Cole et al., 2009</xref>; <xref ref-type="bibr" rid="B72">Slater et al., 2009</xref>; <xref ref-type="bibr" rid="B42">Ma and Hommel, 2015</xref>), and embodiment of assistive technology in the long-term (<xref ref-type="bibr" rid="B66">Serino et al., 2007</xref>; <xref ref-type="bibr" rid="B17">Canzoneri et al., 2013</xref>; <xref ref-type="bibr" rid="B47">Marini et al., 2014</xref>).</p>
<p>Therefore, the association of different factors and mechanisms not only enriches the comprehension of the induction and enhancement of embodiment (in this case, of the virtual prosthesis), but they are necessary (<xref ref-type="bibr" rid="B21">De Vignemont, 2011</xref>; <xref ref-type="bibr" rid="B43">Makin et al., 2017</xref>; <xref ref-type="bibr" rid="B86">Zbinden et al., 2022</xref>).</p>
<p>A limitation of the study is the size and characteristics of the sample. Most of the participants were men and young adults (age range between 18 and 46 years) with traumatic amputations. Studies in people with amputations of other etiologies, ages and in women can clarify what the embodiment process is like under these different conditions.</p>
<p>Future studies with a larger sample and control groups, in addition to randomized clinical trials, are still necessary. Follow-up research is also recommended to obtain a better understanding of whether the modifications are permanent and can be extended to the use of physical prostheses. Nevertheless, these findings show the potential for the use of this system and protocol in the context of rehabilitation of people with amputation in the preprosthetic phase.</p>
</sec>
</sec>
<sec id="S5" sec-type="data-availability">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="S6">
<title>Ethics statement</title>
<p>The studies involving human participants were reviewed and approved by the Universidade Federal de S&#x00E3;o Paulo and Hospital Municipal Jos&#x00E9; de Carvalho Florence. The patients/participants provided their written informed consent to participate in this study. Written informed consent was obtained from the individual(s) for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec id="S7">
<title>Author contributions</title>
<p>KR contributed to the conception and design of the work, acquisition, analysis, and interpretation of data, and writing and review of the article. JM contributed to the conception of the work, development of programs and devices, and review of the article. DP contributed to the development of devices used in the work and review of the article. RD assisted in analysis of data. TS contributed to the development of devices used in the work. JN did the development of programs used in the work. MN did the conception of the work. EC reviewed the article. JF contributed to the conception and design of the work, analysis and interpretation of data, and writing and review of the article. All authors contributed to the article and approved the submitted version.</p>
</sec>
</body>
<back>
<sec id="S8" sec-type="funding-information">
<title>Funding</title>
<p>This study was funded by the Conselho Nacional de Desenvolvimento Cient&#x00ED;fico e Tecnol&#x00F3;gico (n&#x00B0; 442563-2016/7) and Coordena&#x00E7;&#x00E3;o de Aperfei&#x00E7;oamento de Pessoal de N&#x00ED;vel Superior (n&#x00B0; 001).</p>
</sec>
<ack><p>We thank all the volunteers who participated in this research. We also thank Karina Rabello Casali, Henrique Alves Amorim, Adenauer Girardi Casali, Matheus Cardoso Moraes (Universidade Federal de S&#x00E3;o Paulo &#x2013; Instituto de Ci&#x00EA;ncia e Tecnologia), Fabiano Vittoretti Pereira, Renato Jos&#x00E9; Soares, and all collaborators of the Centro de Reabilita&#x00E7;&#x00E3;o Lucy Montoro and Associa&#x00E7;&#x00E3;o Paulista para o Desenvolvimento da Medicina (SPDM) for their support during the experiments. Illustration <xref ref-type="fig" rid="F1">Figure 1A</xref>.1 was designed by Kjpargeter/Freepik.</p>
</ack>
<sec id="S9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="S10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec id="S11" sec-type="supplementary-material">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fnhum.2022.870103/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fnhum.2022.870103/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.pdf" id="DS1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<glossary>
<title>Abbreviations</title>
<def-list id="DL1">
<def-item><term>CCE</term><def><p>crossmodal congruency effect</p></def></def-item>
<def-item><term>CCT</term><def><p>crossmodal congruency task</p></def></def-item>
<def-item><term>EMG</term><def><p>electromyography</p></def></def-item>
<def-item><term>HMI</term><def><p>human-machine interface</p></def></def-item>
<def-item><term>RMS</term><def><p>root mean square</p></def></def-item>
<def-item><term>SCR</term><def><p>skin conductance response</p></def></def-item>
<def-item><term>VR</term><def><p>virtual reality</p></def></def-item>
<def-item><term>RHI</term><def><p>rubber hand illusion.</p></def></def-item>
</def-list>
</glossary>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Alimardani</surname> <given-names>M.</given-names></name> <name><surname>Nishio</surname> <given-names>S.</given-names></name> <name><surname>Ishiguro</surname> <given-names>H.</given-names></name></person-group> (<year>2016</year>). <article-title>Removal of proprioception by BCI raises a stronger body ownership illusion in control of a humanlike robot.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>6</volume>:<fpage>33514</fpage>. <pub-id pub-id-type="doi">10.1038/srep33514</pub-id> <pub-id pub-id-type="pmid">27654174</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Antfolk</surname> <given-names>C.</given-names></name> <name><surname>D&#x2019;Alonzo</surname> <given-names>M.</given-names></name> <name><surname>Controzzi</surname> <given-names>M.</given-names></name> <name><surname>Lundborg</surname> <given-names>G.</given-names></name> <name><surname>Rosen</surname> <given-names>B.</given-names></name> <name><surname>Sebelius</surname> <given-names>F.</given-names></name><etal/></person-group> (<year>2013</year>). <article-title>Artificial redirection of sensation from prosthetic fingers to the phantom hand map on transradial amputees: vibrotactile versus mechanotactile sensory feedback.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>21</volume> <fpage>112</fpage>&#x2013;<lpage>120</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2012.2217989</pub-id> <pub-id pub-id-type="pmid">23033439</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Armel</surname> <given-names>K. C.</given-names></name> <name><surname>Ramachandran</surname> <given-names>V. S.</given-names></name></person-group> (<year>2003</year>). <article-title>Projecting sensations to external objects: evidence from skin conductance response.</article-title> <source><italic>Proc. R. Soc. B Biol. Sci.</italic></source> <volume>270</volume> <fpage>1499</fpage>&#x2013;<lpage>1506</lpage>. <pub-id pub-id-type="doi">10.1098/rspb.2003.2364</pub-id> <pub-id pub-id-type="pmid">12965016</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Arnhoff</surname> <given-names>F. N.</given-names></name> <name><surname>Mehl</surname> <given-names>M. C.</given-names></name></person-group> (<year>1963</year>). <article-title>Body image deterioration in paraplegia</article-title>. <source><italic>J. Nerv. Ment. Dis.</italic></source> <volume>137</volume>, <fpage>88</fpage>&#x2013;<lpage>92</lpage>.</citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bach</surname> <given-names>D. R.</given-names></name> <name><surname>Flandin</surname> <given-names>G.</given-names></name> <name><surname>Friston</surname> <given-names>K. J.</given-names></name> <name><surname>Dolan</surname> <given-names>R. J.</given-names></name></person-group> (<year>2009</year>). <article-title>Time-series analysis for rapid event-related skin conductance responses.</article-title> <source><italic>J. Neurosci. Methods</italic></source> <volume>184</volume> <fpage>224</fpage>&#x2013;<lpage>234</lpage>. <pub-id pub-id-type="doi">10.1016/j.jneumeth.2009.08.005</pub-id> <pub-id pub-id-type="pmid">19686778</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bassolino</surname> <given-names>M.</given-names></name> <name><surname>Serino</surname> <given-names>A.</given-names></name> <name><surname>Ubaldi</surname> <given-names>S.</given-names></name> <name><surname>L&#x00E0;davas</surname> <given-names>E.</given-names></name></person-group> (<year>2010</year>). <article-title>Everyday use of the computer mouse extends peripersonal space representation.</article-title> <source><italic>Neuropsychologia</italic></source> <volume>48</volume> <fpage>803</fpage>&#x2013;<lpage>811</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2009.11.009</pub-id> <pub-id pub-id-type="pmid">19931547</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bekrater-Bodmann</surname> <given-names>R.</given-names></name> <name><surname>Reinhard</surname> <given-names>I.</given-names></name> <name><surname>Diers</surname> <given-names>M.</given-names></name> <name><surname>Fuchs</surname> <given-names>X.</given-names></name> <name><surname>Flor</surname> <given-names>H.</given-names></name></person-group> (<year>2021</year>). <article-title>Relationship of prosthesis ownership and phantom limb pain: results of a survey in 2383 limb amputees.</article-title> <source><italic>Pain</italic></source> <volume>162</volume> <fpage>630</fpage>&#x2013;<lpage>640</lpage>. <pub-id pub-id-type="doi">10.1097/j.pain.0000000000002063</pub-id> <pub-id pub-id-type="pmid">32868751</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Black</surname> <given-names>C.</given-names></name> <name><surname>Voigts</surname> <given-names>J.</given-names></name> <name><surname>Agrawal</surname> <given-names>U.</given-names></name> <name><surname>Ladow</surname> <given-names>M.</given-names></name> <name><surname>Santoyo</surname> <given-names>J.</given-names></name> <name><surname>Moore</surname> <given-names>C.</given-names></name></person-group> (<year>2017</year>). <article-title>Open ephys electroencephalography (Open Ephys +EEG): a modular, low-cost, open-source solution to human neural recording.</article-title> <source><italic>Physiol. Behav.</italic></source> <volume>176</volume> <fpage>139</fpage>&#x2013;<lpage>148</lpage>. <pub-id pub-id-type="doi">10.1016/j.physbeh.2017.03.040</pub-id> <pub-id pub-id-type="pmid">28363838</pub-id></citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bohil</surname> <given-names>C. J.</given-names></name> <name><surname>Alicea</surname> <given-names>B.</given-names></name> <name><surname>Biocca</surname> <given-names>F. A.</given-names></name></person-group> (<year>2011</year>). <article-title>Virtual reality in neuroscience research and therapy.</article-title> <source><italic>Nat. Rev. Neurosci.</italic></source> <volume>12</volume> <fpage>752</fpage>&#x2013;<lpage>762</lpage>. <pub-id pub-id-type="doi">10.1038/nrn3122</pub-id> <pub-id pub-id-type="pmid">22048061</pub-id></citation></ref>
<ref id="B10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Botega</surname> <given-names>N. J.</given-names></name> <name><surname>Bio</surname> <given-names>M. R.</given-names></name> <name><surname>Zomignani</surname> <given-names>M. A.</given-names></name> <name><surname>Garcia</surname> <given-names>C.</given-names> <suffix>Jr.</suffix></name> <name><surname>Pereira</surname> <given-names>W. A. B.</given-names></name></person-group> (<year>1995</year>). <article-title>Transtornos do humor em enfermaria de cl&#x00ED;nica m&#x00E9;dica e valida&#x00E7;&#x00E3;o de escala de medida (HAD) de ansiedade e depress&#x00E3;o.</article-title> <source><italic>Rev. Sa&#x00FA;de P&#x00FA;blica</italic></source> <volume>29</volume> <fpage>359</fpage>&#x2013;<lpage>363</lpage>. <pub-id pub-id-type="doi">10.1590/s0034-89101995000500004</pub-id> <pub-id pub-id-type="pmid">8731275</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Botvinick</surname> <given-names>M.</given-names></name> <name><surname>Cohen</surname> <given-names>J.</given-names></name></person-group> (<year>1998</year>). <article-title>Rubber hands &#x2018;feel&#x2019; touch that eyes see.</article-title> <source><italic>Nature</italic></source> <volume>391</volume>:<fpage>756</fpage>.</citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Braithwaite</surname> <given-names>J. J.</given-names></name> <name><surname>Watson</surname> <given-names>D. G.</given-names></name> <name><surname>Jones</surname> <given-names>R.</given-names></name> <name><surname>Rowe</surname> <given-names>M.</given-names></name></person-group> (<year>2015</year>). <source><italic>A Guide for Analysing Electrodermal Activity &#x0026; Skin Conductance Responses (SCRs) for Psychophysiological Experiments.</italic></source> <comment>Technical Report, 2nd version.</comment> <publisher-loc>Birmingham</publisher-loc>: <publisher-name>Selective Attention &#x0026; Awareness Laboratory (SAAL) Behavioural Brain Sciences Centre, University of Birmingham</publisher-name>.</citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Buck</surname> <given-names>L. E.</given-names></name> <name><surname>Park</surname> <given-names>S.</given-names></name> <name><surname>Bodenheimer</surname> <given-names>B.</given-names></name></person-group> (<year>2020</year>). &#x201C;<article-title>Determining peripersonal space boundaries and their plasticity in relation to object and agent characteristics in an immersive virtual environment</article-title>,&#x201D; in <source><italic>Proceedings of the IEEE Conference on Virtual Reality and 3D User Interfaces</italic></source> (<publisher-loc>Atlanta, GA</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>332</fpage>&#x2013;<lpage>342</lpage>. <pub-id pub-id-type="doi">10.1109/vr46266.2020.00053</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bunderson</surname> <given-names>N. E.</given-names></name></person-group> (<year>2014</year>). <article-title>Real-time control of an interactive impulsive virtual prosthesis.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>22</volume> <fpage>363</fpage>&#x2013;<lpage>370</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2013.2274599</pub-id> <pub-id pub-id-type="pmid">23996579</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Burin</surname> <given-names>D.</given-names></name> <name><surname>Kilteni</surname> <given-names>K.</given-names></name> <name><surname>Rabuffetti</surname> <given-names>M.</given-names></name> <name><surname>Slater</surname> <given-names>M.</given-names></name> <name><surname>Pia</surname> <given-names>L.</given-names></name></person-group> (<year>2019</year>). <article-title>Body ownership increases the interference between observed and executed movements.</article-title> <source><italic>PLoS One</italic></source> <volume>14</volume>:<fpage>e0209899</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0209899</pub-id> <pub-id pub-id-type="pmid">30605454</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Butler</surname> <given-names>A. A.</given-names></name> <name><surname>H&#x00E9;roux</surname> <given-names>M. E.</given-names></name> <name><surname>Gandevia</surname> <given-names>S. C.</given-names></name></person-group> (<year>2017</year>). <article-title>Body ownership and a new proprioceptive role for muscle spindles.</article-title> <source><italic>Acta Physiol.</italic></source> <volume>220</volume> <fpage>19</fpage>&#x2013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1111/apha.12792</pub-id> <pub-id pub-id-type="pmid">27561829</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Canzoneri</surname> <given-names>E.</given-names></name> <name><surname>Marzolla</surname> <given-names>M.</given-names></name> <name><surname>Amoresano</surname> <given-names>A.</given-names></name> <name><surname>Verni</surname> <given-names>G.</given-names></name> <name><surname>Serino</surname> <given-names>A.</given-names></name></person-group> (<year>2013</year>). <article-title>Amputation and prosthesis implantation shape body and peripersonal space representations.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>3</volume> <fpage>1</fpage>&#x2013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1038/srep02844</pub-id> <pub-id pub-id-type="pmid">24088746</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cole</surname> <given-names>J.</given-names></name> <name><surname>Crowle</surname> <given-names>S.</given-names></name> <name><surname>Austwick</surname> <given-names>G.</given-names></name> <name><surname>Slater</surname> <given-names>D. H.</given-names></name></person-group> (<year>2009</year>). <article-title>Exploratory findings with virtual reality for phantom limb pain?; from stump motion to agency and analgesia.</article-title> <source><italic>Disabil. Rehabil.</italic></source> <volume>31</volume> <fpage>846</fpage>&#x2013;<lpage>854</lpage>. <pub-id pub-id-type="doi">10.1080/09638280802355197</pub-id> <pub-id pub-id-type="pmid">19191061</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Critchley</surname> <given-names>H. D.</given-names></name></person-group> (<year>2002</year>). <article-title>Electrodermal responses: what happens in the brain.</article-title> <source><italic>Neuroscientist</italic></source> <volume>8</volume> <fpage>132</fpage>&#x2013;<lpage>142</lpage>. <pub-id pub-id-type="doi">10.1177/107385840200800209</pub-id> <pub-id pub-id-type="pmid">11954558</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>D&#x2019;Alonzo</surname> <given-names>M.</given-names></name> <name><surname>Clemente</surname> <given-names>F.</given-names></name> <name><surname>Cipriani</surname> <given-names>C.</given-names></name></person-group> (<year>2015</year>). <article-title>Vibrotactile stimulation promotes embodiment of an Alien hand in amputees with phantom sensations.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>23</volume> <fpage>450</fpage>&#x2013;<lpage>457</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2014.2337952</pub-id> <pub-id pub-id-type="pmid">25051556</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>De Vignemont</surname> <given-names>F.</given-names></name></person-group> (<year>2011</year>). <article-title>Embodiment, ownership and disownership.</article-title> <source><italic>Conscious. Cogn.</italic></source> <volume>20</volume> <fpage>82</fpage>&#x2013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1016/j.concog.2010.09.004</pub-id> <pub-id pub-id-type="pmid">20943417</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dietrich</surname> <given-names>C.</given-names></name> <name><surname>Nehrdich</surname> <given-names>S.</given-names></name> <name><surname>Seifert</surname> <given-names>S.</given-names></name> <name><surname>Blume</surname> <given-names>K. R.</given-names></name> <name><surname>Miltner</surname> <given-names>W. H. R.</given-names></name> <name><surname>Hofmann</surname> <given-names>G. O.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Leg prosthesis with somatosensory feedback reduces phantom limb pain and increases functionality.</article-title> <source><italic>Front. Neurol.</italic></source> <volume>9</volume>:<fpage>270</fpage>. <pub-id pub-id-type="doi">10.3389/fneur.2018.00270</pub-id> <pub-id pub-id-type="pmid">29755399</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Donovan</surname> <given-names>L.</given-names></name> <name><surname>Feger</surname> <given-names>M. A.</given-names></name> <name><surname>Hart</surname> <given-names>J. M.</given-names></name> <name><surname>Saliba</surname> <given-names>S.</given-names></name> <name><surname>Park</surname> <given-names>J.</given-names></name> <name><surname>Hertel</surname> <given-names>J.</given-names></name></person-group> (<year>2016</year>). <article-title>Effects of an auditory biofeedback device on plantar pressure in patients with chronic ankle instability.</article-title> <source><italic>Gait Posture</italic></source> <volume>44</volume> <fpage>29</fpage>&#x2013;<lpage>36</lpage>. <pub-id pub-id-type="doi">10.1016/j.gaitpost.2015.10.013</pub-id> <pub-id pub-id-type="pmid">27004629</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ehrsson</surname> <given-names>H. H.</given-names></name> <name><surname>Ros&#x00E9;n</surname> <given-names>B.</given-names></name> <name><surname>Stockselius</surname> <given-names>A.</given-names></name> <name><surname>Ragn&#x00F6;</surname> <given-names>C.</given-names></name> <name><surname>K&#x00F6;hler</surname> <given-names>P.</given-names></name> <name><surname>Lundborg</surname> <given-names>G.</given-names></name></person-group> (<year>2008</year>). <article-title>Upper limb amputees can be induced to experience a rubber hand as their own.</article-title> <source><italic>Brain</italic></source> <volume>131</volume> <fpage>3443</fpage>&#x2013;<lpage>3452</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awn297</pub-id> <pub-id pub-id-type="pmid">19074189</pub-id></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ehrsson</surname> <given-names>H. H.</given-names></name> <name><surname>Wiech</surname> <given-names>K.</given-names></name> <name><surname>Weiskopf</surname> <given-names>N.</given-names></name> <name><surname>Dolan</surname> <given-names>R. J.</given-names></name> <name><surname>Passingham</surname> <given-names>R. E.</given-names></name></person-group> (<year>2007</year>). <article-title>Threatening a rubber hand that you feel is yours elicits a cortical anxiety response.</article-title> <source><italic>Proc. Natl. Acad. Sci. U.S.A.</italic></source> <volume>104</volume> <fpage>9828</fpage>&#x2013;<lpage>9833</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0610011104</pub-id> <pub-id pub-id-type="pmid">17517605</pub-id></citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fusaro</surname> <given-names>M.</given-names></name> <name><surname>Tieri</surname> <given-names>G.</given-names></name> <name><surname>Aglioti</surname> <given-names>S. M.</given-names></name></person-group> (<year>2016</year>). <article-title>Seeing pain and pleasure on self and others: behavioural and psychophysiological reactivity in immersive virtual reality.</article-title> <source><italic>J. Neurophysiol.</italic></source> <volume>116</volume> <fpage>2656</fpage>&#x2013;<lpage>2662</lpage>. <pub-id pub-id-type="doi">10.1152/jn.00489.2016</pub-id> <pub-id pub-id-type="pmid">27655965</pub-id></citation></ref>
<ref id="B27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gailey</surname> <given-names>R. S.</given-names></name> <name><surname>Roach</surname> <given-names>K. E.</given-names></name> <name><surname>Applegate</surname> <given-names>E. B.</given-names></name> <name><surname>Cho</surname> <given-names>B.</given-names></name> <name><surname>Cunniffe</surname> <given-names>B.</given-names></name> <name><surname>Licht</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2002</year>). <article-title>The amputee mobility predictor: an instrument to assess determinants of the lower-limb amputee&#x2019;s ability to ambulate.</article-title> <source><italic>Arch. Phys. Med. Rehabil.</italic></source> <volume>83</volume> <fpage>613</fpage>&#x2013;<lpage>627</lpage>. <pub-id pub-id-type="doi">10.1053/apmr.2002.32309</pub-id></citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Graczyk</surname> <given-names>E. L.</given-names></name> <name><surname>Resnik</surname> <given-names>L.</given-names></name> <name><surname>Schiefer</surname> <given-names>M. A.</given-names></name> <name><surname>Schmitt</surname> <given-names>M. S.</given-names></name> <name><surname>Tyler</surname> <given-names>D. J.</given-names></name></person-group> (<year>2018</year>). <article-title>Home use of a neural-connected sensory prosthesis provides the functional and psychosocial experience of having a hand again.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>8</volume> <fpage>1</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.1038/s41598-018-26952-x</pub-id> <pub-id pub-id-type="pmid">29959334</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gumaa</surname> <given-names>M.</given-names></name> <name><surname>Youssef</surname> <given-names>A. R.</given-names></name></person-group> (<year>2019</year>). <article-title>Is virtual reality effective in orthopedic rehabilitation? A systematic review and meta-analysis.</article-title> <source><italic>Phys. Ther.</italic></source> <volume>99</volume> <fpage>1304</fpage>&#x2013;<lpage>1325</lpage>. <pub-id pub-id-type="doi">10.1093/ptj/pzz093</pub-id> <pub-id pub-id-type="pmid">31343702</pub-id></citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hawker</surname> <given-names>G. A.</given-names></name> <name><surname>Mian</surname> <given-names>S.</given-names></name> <name><surname>Kendzerska</surname> <given-names>T.</given-names></name> <name><surname>French</surname> <given-names>M.</given-names></name></person-group> (<year>2011</year>). <article-title>Measures of adult pain: visual analog scale for pain (VAS Pain), numeric rating scale for pain (NRS Pain), McGill pain questionnaire (MPQ), short-form McGill pain questionnaire (SF-MPQ), chronic pain grade scale (CPGS), short form-36 bodily pain scale (SF-36 BPS), and measure of intermittent and constant osteoarthritis pain (ICOAP).</article-title> <source><italic>Arthritis Care Res.</italic></source> <volume>63</volume> <fpage>240</fpage>&#x2013;<lpage>252</lpage>. <pub-id pub-id-type="doi">10.1002/acr.20543</pub-id> <pub-id pub-id-type="pmid">22588748</pub-id></citation></ref>
<ref id="B31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hellman</surname> <given-names>R. B.</given-names></name> <name><surname>Chang</surname> <given-names>E.</given-names></name> <name><surname>Tanner</surname> <given-names>J.</given-names></name> <name><surname>Tillery</surname> <given-names>S. I. H.</given-names></name> <name><surname>Santos</surname> <given-names>V. J.</given-names></name></person-group> (<year>2015</year>). <article-title>A robot hand testbed designed for enhancing embodiment and functional neurorehabilitation of body schema in subjects with upper limb impairment or loss.</article-title> <source><italic>Front. Hum. Neurosci.</italic></source> <volume>9</volume>:<fpage>26</fpage>. <pub-id pub-id-type="doi">10.3389/fnhum.2015.00026</pub-id> <pub-id pub-id-type="pmid">25745391</pub-id></citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Holmes</surname> <given-names>N. P.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name></person-group> (<year>2004</year>). <article-title>The body schema and multisensory representation(s) of peripersonal space.</article-title> <source><italic>Cogn. Process.</italic></source> <volume>5</volume> <fpage>94</fpage>&#x2013;<lpage>105</lpage>. <pub-id pub-id-type="doi">10.1007/s10339-004-0013-3</pub-id> <pub-id pub-id-type="pmid">16467906</pub-id></citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Imaizumi</surname> <given-names>S.</given-names></name> <name><surname>Asai</surname> <given-names>T.</given-names></name> <name><surname>Koyama</surname> <given-names>S.</given-names></name></person-group> (<year>2016</year>). <article-title>Embodied prosthetic arm stabilizes body posture, while unembodied one perturbs it.</article-title> <source><italic>Conscious. Cogn.</italic></source> <volume>45</volume> <fpage>75</fpage>&#x2013;<lpage>88</lpage>. <pub-id pub-id-type="doi">10.1016/j.concog.2016.08.019</pub-id> <pub-id pub-id-type="pmid">27580459</pub-id></citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jackson</surname> <given-names>A. E. E. F.</given-names></name></person-group> (<year>2008</year>). <article-title>Interfacing with the computational brain.</article-title> <source><italic>Bone</italic></source> <volume>23</volume> <fpage>1</fpage>&#x2013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2011.2158586</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jensen</surname> <given-names>M. P.</given-names></name> <name><surname>Karoly</surname> <given-names>P.</given-names></name> <name><surname>Braver</surname> <given-names>S.</given-names></name></person-group> (<year>1986</year>). <article-title>The measurement of clinical pain intensity: a comparison of six methods.</article-title> <source><italic>Pain</italic></source> <volume>27</volume> <fpage>117</fpage>&#x2013;<lpage>126</lpage>.</citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jones</surname> <given-names>L. A.</given-names></name> <name><surname>Kunkel</surname> <given-names>J.</given-names></name> <name><surname>Piateski</surname> <given-names>E.</given-names></name></person-group> (<year>2009</year>). <article-title>Vibrotactile pattern recognition on the arm and back.</article-title> <source><italic>Perception</italic></source> <volume>38</volume> <fpage>52</fpage>&#x2013;<lpage>68</lpage>. <pub-id pub-id-type="doi">10.1068/p5914</pub-id> <pub-id pub-id-type="pmid">19323136</pub-id></citation></ref>
<ref id="B37"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Kandel</surname> <given-names>E. R.</given-names></name> <name><surname>Schwartz</surname> <given-names>J. H.</given-names></name> <name><surname>Jessell</surname> <given-names>T. M.</given-names></name> <name><surname>Siegelbaum</surname> <given-names>S. A.</given-names></name> <name><surname>Hudspeth</surname> <given-names>A. J.</given-names></name></person-group> (<year>2014</year>). <source><italic>Princ&#x00ED;pios de Neuroci&#x00EA;ncias</italic></source>, <edition>5a Edn.</edition> <publisher-loc>Porto Alegre</publisher-loc>: <publisher-name>AMGH</publisher-name>, <fpage>1</fpage>&#x2013;<lpage>153</lpage>.</citation></ref>
<ref id="B38"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Kendall</surname> <given-names>F. P.</given-names></name> <name><surname>McCreary</surname> <given-names>E. K.</given-names></name> <name><surname>Provance</surname> <given-names>P. G.</given-names></name></person-group> (<year>1995</year>). <source><italic>M&#x00FA;sculos Provas e Fun&#x00E7;&#x00F5;es</italic></source>, <edition>4a Edn.</edition> <publisher-loc>S&#x00E3;o Paulo</publisher-loc>: <publisher-name>Manole</publisher-name>.</citation></ref>
<ref id="B39"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kluger</surname> <given-names>D. T.</given-names></name> <name><surname>Joyner</surname> <given-names>J. S.</given-names></name> <name><surname>Wendelken</surname> <given-names>S. M.</given-names></name> <name><surname>Davis</surname> <given-names>T. S.</given-names></name> <name><surname>George</surname> <given-names>J. A.</given-names></name> <name><surname>Page</surname> <given-names>D. M.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>Virtual reality provides an effective platform for functional evaluations of closed-loop neuromyoelectric control.</article-title> <source><italic>IEEE Trans. Neural Syst. Rehabil. Eng.</italic></source> <volume>27</volume> <fpage>876</fpage>&#x2013;<lpage>886</lpage>. <pub-id pub-id-type="doi">10.1109/TNSRE.2019.2908817</pub-id> <pub-id pub-id-type="pmid">30951470</pub-id></citation></ref>
<ref id="B40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>L&#x00E0;davas</surname> <given-names>E.</given-names></name></person-group> (<year>2002</year>). <article-title>Functional and dynamic properties of visual peripersonal space.</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>6</volume> <fpage>17</fpage>&#x2013;<lpage>22</lpage>.</citation></ref>
<ref id="B41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lebedev</surname> <given-names>M. A.</given-names></name> <name><surname>Nicolelis</surname> <given-names>M. A. L.</given-names></name></person-group> (<year>2017</year>). <article-title>Brain-machine interfaces: from basic science to neuroprostheses and neurorehabilitation.</article-title> <source><italic>Physiol. Rev.</italic></source> <volume>97</volume> <fpage>767</fpage>&#x2013;<lpage>837</lpage>. <pub-id pub-id-type="doi">10.1152/physrev.00027.2016</pub-id> <pub-id pub-id-type="pmid">28275048</pub-id></citation></ref>
<ref id="B42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ma</surname> <given-names>K.</given-names></name> <name><surname>Hommel</surname> <given-names>B.</given-names></name></person-group> (<year>2015</year>). <article-title>Body-ownership for actively operated non-corporeal objects.</article-title> <source><italic>Conscious. Cogn.</italic></source> <volume>36</volume> <fpage>75</fpage>&#x2013;<lpage>86</lpage>. <pub-id pub-id-type="doi">10.1016/j.concog.2015.06.003</pub-id> <pub-id pub-id-type="pmid">26094223</pub-id></citation></ref>
<ref id="B43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Makin</surname> <given-names>T. R.</given-names></name> <name><surname>de Vignemont</surname> <given-names>F.</given-names></name> <name><surname>Faisal</surname> <given-names>A. A.</given-names></name></person-group> (<year>2017</year>). <article-title>Neurocognitive barriers to the embodiment of technology.</article-title> <source><italic>Nat. Biomed. Eng.</italic></source> <volume>1</volume>:<fpage>0014</fpage>. <pub-id pub-id-type="doi">10.1038/s41551-016-0014</pub-id></citation></ref>
<ref id="B44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Marasco</surname> <given-names>P. D.</given-names></name> <name><surname>Kim</surname> <given-names>K.</given-names></name> <name><surname>Colgate</surname> <given-names>J. E.</given-names></name> <name><surname>Peshkin</surname> <given-names>M. A.</given-names></name> <name><surname>Kuiken</surname> <given-names>T. A.</given-names></name></person-group> (<year>2011</year>). <article-title>Robotic touch shifts perception of embodiment to a prosthesis in targeted reinnervation amputees.</article-title> <source><italic>Brain</italic></source> <volume>134</volume> <fpage>747</fpage>&#x2013;<lpage>758</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awq361</pub-id> <pub-id pub-id-type="pmid">21252109</pub-id></citation></ref>
<ref id="B45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maravita</surname> <given-names>A.</given-names></name> <name><surname>Iriki</surname> <given-names>A.</given-names></name></person-group> (<year>2004</year>). <article-title>Tools for the body (schema).</article-title> <source><italic>Trends Cogn. Sci.</italic></source> <volume>8</volume> <fpage>79</fpage>&#x2013;<lpage>86</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2003.12.008</pub-id> <pub-id pub-id-type="pmid">15588812</pub-id></citation></ref>
<ref id="B46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maravita</surname> <given-names>A.</given-names></name> <name><surname>Spence</surname> <given-names>C.</given-names></name> <name><surname>Driver</surname> <given-names>J.</given-names></name></person-group> (<year>2003</year>). <article-title>Multisensory integration and the body schema: close to hand and within reach.</article-title> <source><italic>Curr. Biol.</italic></source> <volume>13</volume> <fpage>531</fpage>&#x2013;<lpage>539</lpage>. <pub-id pub-id-type="doi">10.1016/S0960-9822(03)00449-4</pub-id></citation></ref>
<ref id="B47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Marini</surname> <given-names>F.</given-names></name> <name><surname>Tagliabue</surname> <given-names>C. F.</given-names></name> <name><surname>Sposito</surname> <given-names>A. V.</given-names></name> <name><surname>Hernandez-Arieta</surname> <given-names>A.</given-names></name> <name><surname>Brugger</surname> <given-names>P.</given-names></name> <name><surname>Est&#x00E9;vez</surname> <given-names>N.</given-names></name><etal/></person-group> (<year>2014</year>). <article-title>Crossmodal representation of a functional robotic hand arises after extensive training in healthy participants.</article-title> <source><italic>Neuropsychologia</italic></source> <volume>53</volume> <fpage>178</fpage>&#x2013;<lpage>186</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2013.11.017</pub-id> <pub-id pub-id-type="pmid">24296252</pub-id></citation></ref>
<ref id="B48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maruishi</surname> <given-names>M.</given-names></name> <name><surname>Tanaka</surname> <given-names>Y.</given-names></name> <name><surname>Muranaka</surname> <given-names>H.</given-names></name> <name><surname>Tsuji</surname> <given-names>T.</given-names></name> <name><surname>Ozawa</surname> <given-names>Y.</given-names></name> <name><surname>Imaizumi</surname> <given-names>S.</given-names></name><etal/></person-group> (<year>2004</year>). <article-title>Brain activation during manipulation of the myoelectric prosthetic hand: a functional magnetic resonance imaging study.</article-title> <source><italic>Neuroimage</italic></source> <volume>21</volume> <fpage>1604</fpage>&#x2013;<lpage>1611</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2003.12.001</pub-id> <pub-id pub-id-type="pmid">15050584</pub-id></citation></ref>
<ref id="B49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Matsudo</surname> <given-names>S.</given-names></name> <name><surname>Araujo</surname> <given-names>T.</given-names></name> <name><surname>Matsudo</surname> <given-names>V.</given-names></name> <name><surname>Andrade</surname> <given-names>D.</given-names></name> <name><surname>Andrade</surname> <given-names>E.</given-names></name> <name><surname>Oliveira</surname> <given-names>L. C.</given-names></name><etal/></person-group> (<year>2001</year>). <article-title>Question&#x00E1;rio internacional de atividade f&#x00ED;sica (IPAQ): estudo de validade e reprodutibilidade no Brasil.</article-title> <source><italic>Rev. Bras. Ativ. F&#x00ED;s. Sa&#x00FA;de</italic></source> <volume>6</volume> <fpage>5</fpage>&#x2013;<lpage>18</lpage>.</citation></ref>
<ref id="B50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mcdonnell</surname> <given-names>P. M.</given-names></name> <name><surname>Scott</surname> <given-names>R. N.</given-names></name> <name><surname>Dickison</surname> <given-names>J.</given-names></name> <name><surname>Theriault</surname> <given-names>R. A.</given-names></name> <name><surname>Wood</surname> <given-names>B.</given-names></name></person-group> (<year>1989</year>). <article-title>Do artificial limbs become part of the user? New evidence.</article-title> <source><italic>J. Rehabil. Res. Dev.</italic></source> <volume>26</volume> <fpage>17</fpage>&#x2013;<lpage>24</lpage>.</citation></ref>
<ref id="B51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mentiplay</surname> <given-names>B. F.</given-names></name> <name><surname>Perraton</surname> <given-names>L. G.</given-names></name> <name><surname>Bower</surname> <given-names>K. J.</given-names></name> <name><surname>Adair</surname> <given-names>B.</given-names></name> <name><surname>Pua</surname> <given-names>Y. H.</given-names></name> <name><surname>Williams</surname> <given-names>G. P.</given-names></name><etal/></person-group> (<year>2015</year>). <article-title>Assessment of lower limb muscle strength and power using hand-held and fixed dynamometry: a reliability and validity study.</article-title> <source><italic>PLoS One</italic></source> <volume>10</volume>:<fpage>e0140822</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0140822</pub-id> <pub-id pub-id-type="pmid">26509265</pub-id></citation></ref>
<ref id="B52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Middleton</surname> <given-names>A.</given-names></name> <name><surname>Ortiz-Catalan</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). <article-title>Neuromusculoskeletal arm prostheses: personal and social implications of living with an intimately integrated bionic arm.</article-title> <source><italic>Front. Neurorobot.</italic></source> <volume>14</volume>:<fpage>39</fpage>. <pub-id pub-id-type="doi">10.3389/fnbot.2020.00039</pub-id> <pub-id pub-id-type="pmid">32792933</pub-id></citation></ref>
<ref id="B53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mohd Razali</surname> <given-names>N.</given-names></name> <name><surname>Bee Wah</surname> <given-names>Y.</given-names></name></person-group> (<year>2011</year>). <article-title>Power comparisons of Shapiro-Wilk, Kolmogorov-Smirnov, Lilliefors and Anderson-Darling tests.</article-title> <source><italic>J. Stat. Model. Anal.</italic></source> <volume>2</volume> <fpage>13</fpage>&#x2013;<lpage>14</lpage>.</citation></ref>
<ref id="B54"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Moreira</surname> <given-names>J. V. S.</given-names></name> <name><surname>Rodrigues</surname> <given-names>K. A.</given-names></name> <name><surname>Pinheiro</surname> <given-names>D. J. L.</given-names></name> <name><surname>Santos</surname> <given-names>T. C.</given-names></name> <name><surname>Vieira</surname> <given-names>J. L.</given-names></name> <name><surname>Cavalheiro</surname> <given-names>E. A.</given-names></name><etal/></person-group> (<year>2021</year>). <article-title>Electromyography biofeedback system with visual and vibratory feedbacks designed for lower limb rehabilitation.</article-title> <source><italic>arXiv</italic> [Preprint].</source> <comment>arXiv:2103.04034.</comment></citation></ref>
<ref id="B55"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pavone</surname> <given-names>E. F.</given-names></name> <name><surname>Tieri</surname> <given-names>G.</given-names></name> <name><surname>Rizza</surname> <given-names>G.</given-names></name> <name><surname>Tidoni</surname> <given-names>E.</given-names></name> <name><surname>Grisoni</surname> <given-names>L.</given-names></name> <name><surname>Aglioti</surname> <given-names>S. M.</given-names></name></person-group> (<year>2016</year>). <article-title>Embodying others in immersive virtual reality: electro-cortical signatures of monitoring the errors in the actions of an avatar seen from a first-person perspective.</article-title> <source><italic>J. Neurosci.</italic></source> <volume>36</volume> <fpage>268</fpage>&#x2013;<lpage>279</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.0494-15.2016</pub-id> <pub-id pub-id-type="pmid">26758821</pub-id></citation></ref>
<ref id="B56"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Pedrinelli</surname> <given-names>A.</given-names></name></person-group> (<year>2004</year>). <source><italic>Tratamento do Paciente com Amputa&#x00E7;&#x00E3;o</italic></source>, <edition>1st Edn</edition>. <publisher-loc>S&#x00E3;o Paulo</publisher-loc>: <publisher-name>Roca</publisher-name>.</citation></ref>
<ref id="B57"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Petrini</surname> <given-names>F. M.</given-names></name> <name><surname>Valle</surname> <given-names>G.</given-names></name> <name><surname>Bumbasirevic</surname> <given-names>M.</given-names></name> <name><surname>Barberi</surname> <given-names>F.</given-names></name> <name><surname>Bortolotti</surname> <given-names>D.</given-names></name> <name><surname>Cvancara</surname> <given-names>P.</given-names></name><etal/></person-group> (<year>2019</year>). <article-title>Enhancing functional abilities and cognitive integration of the lower limb prosthesis.</article-title> <source><italic>Sci. Transl. Med.</italic></source> <volume>11</volume>:<fpage>eaav8939</fpage>. <pub-id pub-id-type="doi">10.1126/scitranslmed.aav8939</pub-id> <pub-id pub-id-type="pmid">31578244</pub-id></citation></ref>
<ref id="B58"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Proske</surname> <given-names>U.</given-names></name> <name><surname>Gandevia</surname> <given-names>S. C.</given-names></name></person-group> (<year>2012</year>). <article-title>The proprioceptive senses: their roles in signaling body shape, body position and movement, and muscle force.</article-title> <source><italic>Physiol. Rev.</italic></source> <volume>92</volume> <fpage>1651</fpage>&#x2013;<lpage>1697</lpage>. <pub-id pub-id-type="doi">10.1152/physrev.00048.2011</pub-id> <pub-id pub-id-type="pmid">23073629</pub-id></citation></ref>
<ref id="B59"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Qian</surname> <given-names>J.</given-names></name> <name><surname>McDonough</surname> <given-names>D. J.</given-names></name> <name><surname>Gao</surname> <given-names>Z.</given-names></name></person-group> (<year>2020</year>). <article-title>The effectiveness of virtual reality exercise on individual&#x2019;s physiological, psychological and rehabilitative outcomes: a systematic review.</article-title> <source><italic>Int. J. Environ. Res. Public Health</italic></source> <volume>17</volume> <fpage>1</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.3390/ijerph17114133</pub-id> <pub-id pub-id-type="pmid">32531906</pub-id></citation></ref>
<ref id="B60"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Raspopovic</surname> <given-names>S.</given-names></name> <name><surname>Capogrosso</surname> <given-names>M.</given-names></name> <name><surname>Petrini</surname> <given-names>F. M.</given-names></name> <name><surname>Bonizzato</surname> <given-names>M.</given-names></name> <name><surname>Rigosa</surname> <given-names>J.</given-names></name> <name><surname>Di Pino</surname> <given-names>G.</given-names></name><etal/></person-group> (<year>2014</year>). <article-title>Restoring natural sensory feedback in real-time bidirectional hand prostheses.</article-title> <source><italic>Sci. Transl. Med.</italic></source> <volume>6</volume>:<fpage>222ra19</fpage>. <pub-id pub-id-type="doi">10.1126/scitranslmed.3006820</pub-id> <pub-id pub-id-type="pmid">24500407</pub-id></citation></ref>
<ref id="B61"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Riemann</surname> <given-names>B.</given-names></name> <name><surname>Lephart</surname> <given-names>S.</given-names></name></person-group> (<year>2002</year>). <article-title>The sensorimotor system, part II: the role of proprioception in motor control and functional joint stability.</article-title> <source><italic>J. Athl. Train.</italic></source> <volume>37</volume> <fpage>80</fpage>&#x2013;<lpage>84</lpage>.</citation></ref>
<ref id="B62"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sarmento</surname> <given-names>A.</given-names></name></person-group> (<year>2009</year>). <source>Apresenta&#x00E7;&#x00E3;o e Aplicabilidade da Vers&#x00E3;o Brasileira da MoCA (Montreal Cognitive Assessment)Para Rastreio de Comprometimento Cognitivo Leve</source>. Available online at: <ext-link ext-link-type="uri" xlink:href="http://repositorio.unifesp.br/bitstream/handle/11600/8967/Publico-00377.pdf?sequence=1&#x0026;isAllowed=y">http://repositorio.unifesp.br/bitstream/handle/11600/8967/Publico-00377.pdf?sequence=1&#x0026;isAllowed=y</ext-link> <comment>(accessed July 2017)</comment>.</citation></ref>
<ref id="B63"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schiefer</surname> <given-names>M.</given-names></name> <name><surname>Tan</surname> <given-names>D.</given-names></name> <name><surname>Sidek</surname> <given-names>S. M.</given-names></name> <name><surname>Tyler</surname> <given-names>D. J.</given-names></name></person-group> (<year>2017</year>). <article-title>Sensory feedback by peripheral nerve stimulation improves task performance in individuals with upper limb loss using a myoelectric prosthesis.</article-title> <source><italic>J. Neural Eng.</italic></source> <volume>13</volume> <fpage>1</fpage>&#x2013;<lpage>25</lpage>. <pub-id pub-id-type="doi">10.1088/1741-2560/13/1/016001.Sensory</pub-id> <pub-id pub-id-type="pmid">26070898</pub-id></citation></ref>
<ref id="B64"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sebelius</surname> <given-names>F. C. P.</given-names></name> <name><surname>Ros&#x00E9;n</surname> <given-names>B. N.</given-names></name> <name><surname>Lundborg</surname> <given-names>G. N.</given-names></name></person-group> (<year>2005</year>). <article-title>Refined myoelectric control in below-elbow amputees using artificial neural networks and a data glove.</article-title> <source><italic>J. Hand Surg.</italic></source> <volume>30</volume> <fpage>780</fpage>&#x2013;<lpage>789</lpage>. <pub-id pub-id-type="doi">10.1016/j.jhsa.2005.01.002</pub-id> <pub-id pub-id-type="pmid">16039372</pub-id></citation></ref>
<ref id="B65"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Seng&#x00FC;l</surname> <given-names>A.</given-names></name> <name><surname>van Elk</surname> <given-names>M.</given-names></name> <name><surname>Rognini</surname> <given-names>G.</given-names></name> <name><surname>Aspell</surname> <given-names>J. E.</given-names></name> <name><surname>Bleuler</surname> <given-names>H.</given-names></name> <name><surname>Blanke</surname> <given-names>O.</given-names></name></person-group> (<year>2012</year>). <article-title>Extending the body to virtual tools using a robotic surgical interface: evidence from the crossmodal congruency task.</article-title> <source><italic>PLoS One</italic></source> <volume>7</volume>:<fpage>e49473</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0049473</pub-id> <pub-id pub-id-type="pmid">23227142</pub-id></citation></ref>
<ref id="B66"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Serino</surname> <given-names>A.</given-names></name> <name><surname>Bassolino</surname> <given-names>M.</given-names></name> <name><surname>Farn&#x00E8;</surname> <given-names>A.</given-names></name> <name><surname>L&#x00E0;davas</surname> <given-names>E.</given-names></name></person-group> (<year>2007</year>). <article-title>Extended multisensory space in blind cane users.</article-title> <source><italic>Psychol. Sci.</italic></source> <volume>18</volume> <fpage>642</fpage>&#x2013;<lpage>648</lpage>. <pub-id pub-id-type="doi">10.1111/j.1467-9280.2007.01952.x</pub-id> <pub-id pub-id-type="pmid">17614874</pub-id></citation></ref>
<ref id="B67"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shokur</surname> <given-names>S.</given-names></name> <name><surname>Gallo</surname> <given-names>S.</given-names></name> <name><surname>Moioli</surname> <given-names>R. C.</given-names></name> <name><surname>Donati</surname> <given-names>A. R. C.</given-names></name> <name><surname>Morya</surname> <given-names>E.</given-names></name> <name><surname>Bleuler</surname> <given-names>H.</given-names></name><etal/></person-group> (<year>2016</year>). <article-title>Assimilation of virtual legs and perception of floor texture by complete paraplegic patients receiving artificial tactile feedback.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>6</volume> <fpage>1</fpage>&#x2013;<lpage>14</lpage>. <pub-id pub-id-type="doi">10.1038/srep32293</pub-id> <pub-id pub-id-type="pmid">27640345</pub-id></citation></ref>
<ref id="B68"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Siegle</surname> <given-names>J. H.</given-names></name> <name><surname>L&#x00F3;pez</surname> <given-names>A. C.</given-names></name> <name><surname>Patel</surname> <given-names>Y. A.</given-names></name> <name><surname>Abramov</surname> <given-names>K.</given-names></name> <name><surname>Ohayon</surname> <given-names>S.</given-names></name> <name><surname>Voigts</surname> <given-names>J.</given-names></name></person-group> (<year>2017</year>). <article-title>Open ephys: an open-source, plugin-based platform for multichannel electrophysiology.</article-title> <source><italic>J. Neural Eng.</italic></source> <volume>14</volume>:<fpage>045003</fpage>. <pub-id pub-id-type="doi">10.1088/1741-2552/aa5eea</pub-id> <pub-id pub-id-type="pmid">28169219</pub-id></citation></ref>
<ref id="B69"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sime</surname> <given-names>D. W.</given-names></name></person-group> (<year>2019</year>). <article-title>Potential application of virtual reality for interface customisation (and pre-training) of amputee patients as preparation for prosthetic use.</article-title> <source><italic>Adv. Exp. Med. Biol.</italic></source> <volume>1120</volume> <fpage>15</fpage>&#x2013;<lpage>24</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-06070-1_2</pub-id></citation></ref>
<ref id="B70"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sitaram</surname> <given-names>R.</given-names></name> <name><surname>Ros</surname> <given-names>T.</given-names></name> <name><surname>Stoeckel</surname> <given-names>L.</given-names></name> <name><surname>Haller</surname> <given-names>S.</given-names></name> <name><surname>Scharnowski</surname> <given-names>F.</given-names></name> <name><surname>Lewis-Peacock</surname> <given-names>J.</given-names></name><etal/></person-group> (<year>2017</year>). <article-title>Closed-loop brain training: the science of neurofeedback.</article-title> <source><italic>Nat. Rev. Neurosci.</italic></source> <volume>18</volume> <fpage>86</fpage>&#x2013;<lpage>100</lpage>. <pub-id pub-id-type="doi">10.1038/nrn.2016.164</pub-id> <pub-id pub-id-type="pmid">28003656</pub-id></citation></ref>
<ref id="B71"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Skinner</surname> <given-names>B. F.</given-names></name></person-group> (<year>1938</year>). <source><italic>The Behavior of Organisms: An Experimental Analysis.</italic></source> <publisher-loc>New York, NY</publisher-loc>: <publisher-name>Appleton-Century-Crofts</publisher-name>.</citation></ref>
<ref id="B72"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Slater</surname> <given-names>M.</given-names></name> <name><surname>Perez-marcos</surname> <given-names>D.</given-names></name> <name><surname>Ehrsson</surname> <given-names>H. H.</given-names></name></person-group> (<year>2009</year>). <article-title>Inducing illusory ownership of a virtual body.</article-title> <source><italic>Front. Neurosci.</italic></source> <volume>3</volume> <fpage>214</fpage>&#x2013;<lpage>220</lpage>. <pub-id pub-id-type="doi">10.3389/neuro.01.029.2009</pub-id> <pub-id pub-id-type="pmid">20011144</pub-id></citation></ref>
<ref id="B73"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Slater</surname> <given-names>M.</given-names></name> <name><surname>Spanlang</surname> <given-names>B.</given-names></name> <name><surname>Sanchez-Vives</surname> <given-names>M. V.</given-names></name> <name><surname>Blanke</surname> <given-names>O.</given-names></name></person-group> (<year>2010</year>). <article-title>First person experience of body transfer in virtual reality.</article-title> <source><italic>PLoS One</italic></source> <volume>5</volume>:<fpage>e10564</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0010564</pub-id> <pub-id pub-id-type="pmid">20485681</pub-id></citation></ref>
<ref id="B74"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Staudenmann</surname> <given-names>D.</given-names></name> <name><surname>Roeleveld</surname> <given-names>K.</given-names></name> <name><surname>Stegeman</surname> <given-names>D. F.</given-names></name> <name><surname>van Dieen</surname> <given-names>J. H.</given-names></name></person-group> (<year>2010</year>). <article-title>Methodological aspects of SEMG recordings for force estimation&#x2013;a tutorial and review.</article-title> <source><italic>J. Electromyogr. Kinesiol.</italic></source> <volume>20</volume> <fpage>375</fpage>&#x2013;<lpage>387</lpage>. <pub-id pub-id-type="doi">10.1016/j.jelekin.2009.08.005</pub-id> <pub-id pub-id-type="pmid">19758823</pub-id></citation></ref>
<ref id="B75"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stegeman</surname> <given-names>D.</given-names></name> <name><surname>Hermens</surname> <given-names>H.</given-names></name></person-group> (<year>2007</year>). <source><italic>Standards for Surface Electromyography: The European Project Surface EMG for non-Invasive Assessment of Muscles (SENIAM)</italic></source>, <fpage>108</fpage>&#x2013;<lpage>112</lpage>. Available online at: <ext-link ext-link-type="uri" xlink:href="https://www.researchgate.net/profile/Hermie-Hermens/publication/228486725_Standards_for_suface_electromyography_The_European_project_Surface_EMG_for_non-invasive_assessment_of_muscles_SENIAM">https://www.researchgate.net/profile/Hermie-Hermens/publication/228486725_Standards_for_suface_electromyography_The_European_project_Surface_EMG_for_non-invasive_assessment_of_muscles_SENIAM</ext-link> <comment>(accessed June 2017)</comment>.</citation></ref>
<ref id="B76"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tieri</surname> <given-names>G.</given-names></name> <name><surname>Tidoni</surname> <given-names>E.</given-names></name> <name><surname>Pavone</surname> <given-names>E. F.</given-names></name> <name><surname>Aglioti</surname> <given-names>S. M.</given-names></name></person-group> (<year>2015</year>). <article-title>Body visual discontinuity affects feeling of ownership and skin conductance responses.</article-title> <source><italic>Sci. Rep.</italic></source> <volume>5</volume> <fpage>1</fpage>&#x2013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1038/srep17139</pub-id> <pub-id pub-id-type="pmid">26602036</pub-id></citation></ref>
<ref id="B77"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tsakiris</surname> <given-names>M.</given-names></name> <name><surname>Hesse</surname> <given-names>M. D.</given-names></name> <name><surname>Boy</surname> <given-names>C.</given-names></name> <name><surname>Haggard</surname> <given-names>P.</given-names></name> <name><surname>Fink</surname> <given-names>G. R.</given-names></name></person-group> (<year>2007a</year>). <article-title>Neural signatures of body ownership: a sensory network for bodily self-consciousness.</article-title> <source><italic>Cereb. Cortex</italic></source> <volume>17</volume> <fpage>2235</fpage>&#x2013;<lpage>2244</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhl131</pub-id> <pub-id pub-id-type="pmid">17138596</pub-id></citation></ref>
<ref id="B78"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tsakiris</surname> <given-names>M.</given-names></name> <name><surname>Sch&#x00FC;tz-Bosbach</surname> <given-names>S.</given-names></name> <name><surname>Gallagher</surname> <given-names>S.</given-names></name></person-group> (<year>2007b</year>). <article-title>On agency and body-ownership: phenomenological and neurocognitive reflections.</article-title> <source><italic>Conscious. Cogn.</italic></source> <volume>16</volume> <fpage>645</fpage>&#x2013;<lpage>660</lpage>. <pub-id pub-id-type="doi">10.1016/j.concog.2007.05.012</pub-id> <pub-id pub-id-type="pmid">17616469</pub-id></citation></ref>
<ref id="B79"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Den Heiligenberg</surname> <given-names>F. M. Z.</given-names></name> <name><surname>Orlov</surname> <given-names>T.</given-names></name> <name><surname>MacDonald</surname> <given-names>S. N.</given-names></name> <name><surname>Duff</surname> <given-names>E. P.</given-names></name> <name><surname>Henderson Slater</surname> <given-names>D.</given-names></name> <name><surname>Beckmann</surname> <given-names>C. F.</given-names></name><etal/></person-group> (<year>2018</year>). <article-title>Artificial limb representation in amputees.</article-title> <source><italic>Brain</italic></source> <volume>141</volume> <fpage>1422</fpage>&#x2013;<lpage>1433</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awy054</pub-id> <pub-id pub-id-type="pmid">29534154</pub-id></citation></ref>
<ref id="B80"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Elk</surname> <given-names>M.</given-names></name> <name><surname>Forget</surname> <given-names>J.</given-names></name> <name><surname>Blanke</surname> <given-names>O.</given-names></name></person-group> (<year>2013</year>). <article-title>The effect of limb crossing and limb congruency on multisensory integration in peripersonal space for the upper and lower extremities.</article-title> <source><italic>Conscious. Cogn.</italic></source> <volume>22</volume> <fpage>545</fpage>&#x2013;<lpage>555</lpage>. <pub-id pub-id-type="doi">10.1016/j.concog.2013.02.006</pub-id> <pub-id pub-id-type="pmid">23579198</pub-id></citation></ref>
<ref id="B81"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wijk</surname> <given-names>U.</given-names></name> <name><surname>Carlsson</surname> <given-names>I.</given-names></name></person-group> (<year>2015</year>). <article-title>Forearm amputees&#x2019; views of prosthesis use and sensory feedback.</article-title> <source><italic>J. Hand Ther.</italic></source> <volume>28</volume> <fpage>269</fpage>&#x2013;<lpage>278</lpage>. <pub-id pub-id-type="doi">10.1016/j.jht.2015.01.013</pub-id> <pub-id pub-id-type="pmid">25990442</pub-id></citation></ref>
<ref id="B82"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wolpert</surname> <given-names>D. M.</given-names></name> <name><surname>Ghahramani</surname> <given-names>Z.</given-names></name></person-group> (<year>2000</year>). <article-title>Computational principles of movement neuroscience.</article-title> <source><italic>Nat. Neurosci.</italic></source> <volume>3</volume> <fpage>1212</fpage>&#x2013;<lpage>1217</lpage>. <pub-id pub-id-type="doi">10.1038/81497</pub-id> <pub-id pub-id-type="pmid">11127840</pub-id></citation></ref>
<ref id="B83"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wolpert</surname> <given-names>D. M.</given-names></name> <name><surname>Diedrichsen</surname> <given-names>J.</given-names></name> <name><surname>Flanagan</surname> <given-names>J. R.</given-names></name></person-group> (<year>2011</year>). <article-title>Principles of sensorimotor learning.</article-title> <source><italic>Nat. Rev. Neurosci.</italic></source> <volume>12</volume> <fpage>739</fpage>&#x2013;<lpage>751</lpage>. <pub-id pub-id-type="doi">10.1038/nrn3112</pub-id> <pub-id pub-id-type="pmid">22033537</pub-id></citation></ref>
<ref id="B84"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wolpert</surname> <given-names>D. M.</given-names></name> <name><surname>Ghahramani</surname> <given-names>Z.</given-names></name> <name><surname>Jordan</surname> <given-names>M.</given-names></name></person-group> (<year>1995</year>). <article-title>An internal model for sensorimotor integration.</article-title> <source><italic>Science</italic></source> <volume>269</volume> <fpage>1880</fpage>&#x2013;<lpage>1882</lpage>.</citation></ref>
<ref id="B85"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yuan</surname> <given-names>Y.</given-names></name> <name><surname>Steed</surname> <given-names>A.</given-names></name></person-group> (<year>2010</year>). &#x201C;<article-title>Is the rubber hand illusion induced by immersive virtual reality?</article-title>,&#x201D; in <source><italic>Proceedings of the IEEE Virtual Reality Conference</italic></source> (<publisher-loc>Boston, MA</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>95</fpage>&#x2013;<lpage>102</lpage>. <pub-id pub-id-type="doi">10.1109/VR.2010.5444807</pub-id></citation></ref>
<ref id="B86"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zbinden</surname> <given-names>J.</given-names></name> <name><surname>Lendaro</surname> <given-names>E.</given-names></name> <name><surname>Ortiz-Catalan</surname> <given-names>M.</given-names></name></person-group> (<year>2022</year>). <article-title>Prosthetic embodiment: systematic review on definitions, measures, and experimental paradigms.</article-title> <source><italic>J. Neuroeng. Rehabil.</italic></source> <volume>19</volume> <fpage>1</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1186/s12984-022-01006-6</pub-id> <pub-id pub-id-type="pmid">35346251</pub-id></citation></ref>
</ref-list>
</back>
</article>