<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Digit. Health</journal-id>
<journal-title>Frontiers in Digital Health</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Digit. Health</abbrev-journal-title>
<issn pub-type="epub">2673-253X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fdgth.2024.1324511</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Digital Health</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>A marker-less human motion analysis system for motion-based biomarker identification and quantification in knee disorders</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes"><name><surname>Armstrong</surname><given-names>Kai</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="cor1">&#x002A;</xref><uri xlink:href="https://loop.frontiersin.org/people/2548537/overview"/><role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/><role content-type="https://credit.niso.org/contributor-roles/data-curation/"/><role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/><role content-type="https://credit.niso.org/contributor-roles/investigation/"/><role content-type="https://credit.niso.org/contributor-roles/methodology/"/><role content-type="https://credit.niso.org/contributor-roles/validation/"/><role content-type="https://credit.niso.org/contributor-roles/visualization/"/><role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/><role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/><role content-type="https://credit.niso.org/contributor-roles/software/"/></contrib>
<contrib contrib-type="author"><name><surname>Zhang</surname><given-names>Lei</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref><role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/><role content-type="https://credit.niso.org/contributor-roles/project-administration/"/><role content-type="https://credit.niso.org/contributor-roles/resources/"/><role content-type="https://credit.niso.org/contributor-roles/supervision/"/><role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/><role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/><role content-type="https://credit.niso.org/contributor-roles/investigation/"/></contrib>
<contrib contrib-type="author"><name><surname>Wen</surname><given-names>Yan</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/></contrib>
<contrib contrib-type="author"><name><surname>Willmott</surname><given-names>Alexander P.</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref><uri xlink:href="https://loop.frontiersin.org/people/2604245/overview" />
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author"><name><surname>Lee</surname><given-names>Paul</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref><role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/><role content-type="https://credit.niso.org/contributor-roles/data-curation/"/><role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/><role content-type="https://credit.niso.org/contributor-roles/supervision/"/><role content-type="https://credit.niso.org/contributor-roles/validation/"/><role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/></contrib>
<contrib contrib-type="author" corresp="yes"><name><surname>Ye</surname><given-names>Xujiong</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="cor1">&#x002A;</xref><uri xlink:href="https://loop.frontiersin.org/people/1121566/overview" /><role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/><role content-type="https://credit.niso.org/contributor-roles/project-administration/"/><role content-type="https://credit.niso.org/contributor-roles/resources/"/><role content-type="https://credit.niso.org/contributor-roles/supervision/"/><role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/></contrib>
</contrib-group>
<aff id="aff1"><label><sup>1</sup></label><institution>Laboratory of Vision Engineering, School of Computer Science, University of Lincoln</institution>, <addr-line>Lincoln</addr-line>, <country>United Kingdom</country></aff>
<aff id="aff2"><label><sup>2</sup></label><institution>School of Sport and Exercise Science, University of Lincoln</institution>, <addr-line>Lincoln</addr-line>, <country>United Kingdom</country></aff>
<aff id="aff3"><label><sup>3</sup></label><addr-line>MSK Doctors</addr-line>, <addr-line>Sleaford</addr-line>, <country>United Kingdom</country></aff>
<author-notes>
<fn fn-type="edited-by"><p><bold>Edited by:</bold> Ramin Ramezani, University of California, Los Angeles, United States</p></fn>
<fn fn-type="edited-by"><p><bold>Reviewed by:</bold> Wan Shi Low, University of Malaya, Malaysia</p>
<p>Nicola Bombieri, University of Verona, Italy</p></fn>
<corresp id="cor1"><label>&#x002A;</label><bold>Correspondence:</bold> Kai Armstrong <email>karmstrong@lincoln.ac.uk</email> Xujiong Ye <email>xye@lincoln.ac.uk</email></corresp>
</author-notes>
<pub-date pub-type="epub"><day>23</day><month>01</month><year>2024</year></pub-date>
<pub-date pub-type="collection"><year>2024</year></pub-date>
<volume>6</volume><elocation-id>1324511</elocation-id>
<history>
<date date-type="received"><day>19</day><month>10</month><year>2023</year></date>
<date date-type="accepted"><day>09</day><month>01</month><year>2024</year></date>
</history>
<permissions>
<copyright-statement>&#x00A9; 2024 Armstrong, Zhang, Wen, Willmott, Lee and Ye.</copyright-statement>
<copyright-year>2024</copyright-year><copyright-holder>Armstrong, Zhang, Wen, Willmott, Lee and Ye</copyright-holder><license license-type="open-access" xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="http://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license>
</permissions>
<abstract>
<p>In recent years the healthcare industry has had increased difficulty seeing all low-risk patients, including but not limited to suspected osteoarthritis (OA) patients. To help address the increased waiting lists and shortages of staff, we propose a novel method of automated biomarker identification and quantification for the monitoring of treatment or disease progression through the analysis of clinical motion data captured from a standard RGB video camera. The proposed method allows for the measurement of biomechanics information and analysis of their clinical significance, as both a cheap and sensitive alternative to the traditional motion capture techniques. These methods and results validate the capabilities of standard RGB cameras in clinical environments to capture clinically relevant motion data. Our method focuses on generating 3D human shape and pose from 2D video data via adversarial training in a deep neural network with a self-attention mechanism to encode both spatial and temporal information. Biomarker identification using Principal Component Analysis (PCA) allows the production of representative features from motion data and uses these to generate a clinical report automatically. These new biomarkers can then be used to assess the success of treatment and track the progress of rehabilitation or to monitor the progression of the disease. These methods have been validated with a small clinical study, by administering a local anaesthetic to a small population with knee pain; this allows these new representative biomarkers to be validated as statistically significant (<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM1"><mml:mi>p</mml:mi></mml:math></inline-formula>-value <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM2"><mml:mo>&#x003C;</mml:mo><mml:mn>0.05</mml:mn></mml:math></inline-formula>). 
These significant biomarkers include the cumulative acceleration of elbow flexion/extension in a sit-to-stand, as well as the smoothness of the knee and elbow flexion/extension in both a squat and sit-to-stand.</p>
</abstract>
<kwd-group>
<kwd>biomarkers</kwd>
<kwd>biomechanics</kwd>
<kwd>machine learning</kwd>
<kwd>human pose estimation</kwd>
<kwd>human mesh recovery</kwd>
<kwd>patient monitoring</kwd>
<kwd>automated rehabilitation</kwd>
</kwd-group><contract-num rid="cn001">&#x00A0;</contract-num><contract-sponsor id="cn001">EPSRC Doctoral Training Partnership</contract-sponsor><counts>
<fig-count count="9"/>
<table-count count="2"/><equation-count count="55"/><ref-count count="44"/><page-count count="0"/><word-count count="0"/></counts><custom-meta-wrap><custom-meta><meta-name>section-at-acceptance</meta-name><meta-value>Health Informatics</meta-value></custom-meta></custom-meta-wrap>
</article-meta>
</front>
<body><sec id="s1" sec-type="intro"><label>1</label><title>Introduction</title>
<p>The knee, a remarkable yet vulnerable joint, stands as one of the most frequently afflicted areas in the human body. Amid its susceptibility to injuries, knee osteoarthritis (OA) emerges as the most prevalent joint disorder in the United States, affecting a substantial portion of the population (<xref ref-type="bibr" rid="B1">1</xref>). This widespread prevalence underscores the critical need for effective diagnostic and intervention strategies in the realm of knee health. Factors such as age, weight, and occupation contribute to the risk of developing knee OA (<xref ref-type="bibr" rid="B2">2</xref>). In the United Kingdom, the burden of knee OA is evidenced by over 90,000 total knee replacements annually, a testament to the impact on individuals&#x2019; daily lives and the strain on healthcare systems (<xref ref-type="bibr" rid="B3">3</xref>, <xref ref-type="bibr" rid="B4">4</xref>). As these interventions come at a considerable cost, surpassing &#x00A3;7,000 on average per procedure or a cost per Quality-adjusted Life Year (QALY) gained exceeding &#x00A3;1,300, the economic implications are significant, with the UK&#x2019;s National Health Service (NHS) expending over &#x00A3;600 million annually on knee-related procedures (<xref ref-type="bibr" rid="B5">5</xref>). Traditionally, osteoarthritis has been diagnosed with magnetic resonance imaging as shown in <xref ref-type="fig" rid="F1">Figure 1</xref>. This brings economic and healthcare burdens and necessitates innovative approaches to both diagnosis and treatment, paving the way for advancements in medical imaging and motion analysis techniques.</p>
<fig id="F1" position="float"><label>Figure 1</label>
<caption><p>Two examples of knee MRIs in the sagittal view from this study&#x2019;s patients to highlight the severity of osteoarthritis, as shown by the lack of cartilage around the knee joint.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g001.tif"/>
</fig>
<p>Expanding on the diagnostic landscape, recent developments in marker-based motion capture (MoCap) have provided valuable insights into the biomechanics of the knee and its relationship to osteoarthritis. Utilising retroreflective markers, marker-based MoCap enables precise tracking of joint movements, allowing for a detailed analysis of gait patterns, joint kinematics, and overall knee function (<xref ref-type="bibr" rid="B6">6</xref>&#x2013;<xref ref-type="bibr" rid="B8">8</xref>). Studies by Sparkes et al. and Duffell et al. have demonstrated the efficacy of gait analysis in distinguishing individuals with knee osteoarthritis from healthy counterparts, highlighting the potential of MoCap as a diagnostic tool (<xref ref-type="bibr" rid="B6">6</xref>, <xref ref-type="bibr" rid="B7">7</xref>). Moreover, Robbins et al. delved into risk factors associated with both non-traumatic and post-traumatic knee osteoarthritis, showcasing the versatility of marker-based MoCap in understanding disease progression (<xref ref-type="bibr" rid="B8">8</xref>).</p>
<p>While marker-based MoCap provides valuable data, its adoption in clinical settings presents challenges, including the need for highly trained experts, time-consuming data collection, and the demand for dedicated laboratory spaces (<xref ref-type="bibr" rid="B9">9</xref>&#x2013;<xref ref-type="bibr" rid="B11">11</xref>). Despite these limitations, marker-based MoCap remains a cornerstone in biomechanics research, offering unparalleled insights into knee function and pathology. Recent strides in technology have furthered the field with a promising era of marker-less motion capture. Fueled by advancements in human pose estimation techniques, this innovative approach provides a compelling alternative for the analysis of human movement, particularly in clinical settings (<xref ref-type="bibr" rid="B12">12</xref>). Unlike marker-based methods that rely on the placement and tracking of retroreflective markers, marker-less motion capture extracts intricate details of human pose directly from standard RGB images and videos (<xref ref-type="bibr" rid="B13">13</xref>). This transition from marker-based to marker-less motion capture introduces new possibilities while addressing some of the challenges associated with traditional marker-based approaches.</p>
<p>Marker-less motion capture, while helping alleviate some of these problems, serves as an effective tool in the analysis of human pose, particularly in uncontrolled environments (<xref ref-type="bibr" rid="B12">12</xref>). However, applying these techniques in a clinical setting requires a reproducible result in any setting. The primary challenge lies in the difficulty of controlling the clinical environment. Many clinical locations have natural lighting whose directions vary throughout the day, potentially leading to incorrect human motion sequences (<xref ref-type="bibr" rid="B14">14</xref>). Additionally, variations in patient attire and potential occlusion of body parts due to clothing further complicate marker-less motion capture and biomechanics analysis (<xref ref-type="bibr" rid="B15">15</xref>&#x2013;<xref ref-type="bibr" rid="B17">17</xref>). These conditions must be meticulously controlled to minimise data variation. Moreover, each patient has different functional capabilities, necessitating careful selection of actions to develop a series of tests that everyone can perform.</p>
<p>In response to these challenges, this study aims to introduce an automated system designed for the analysis of clinical motion data captured from a standard 2D RGB video camera. This system aims not only to quantify, identify, and measure objective diagnostic biomarkers but also to present a faster and more cost-effective alternative to the current gold standard of marker-based MoCap. To address the complexities and variabilities inherent in clinical data capture environments, our approach involves the development of a fully end-to-end marker-less biomechanics solution. We acknowledge the intricate nature of clinical tests, the variability in data capture environments, and the complexity of result analysis. To tackle these hurdles, we propose a simple yet robust protocol and framework designed to function in any environment. By adhering to a standardised protocol endorsed by medical professionals, our approach not only reduces variability in data but also ensures clinical relevance throughout the biomechanics analysis process.</p>
<p>This approach utilises human pose estimation, a process of locating the positions of human joints from images and videos (<xref ref-type="bibr" rid="B18">18</xref>). The technique, owing to its portability, low cost, and accessibility, has gained prominence in the fields of sports science and clinical biomechanics analysis (<xref ref-type="bibr" rid="B19">19</xref>). Recent advancements in human pose estimation have facilitated rapid and accurate marker-less MoCap using standard RGB images and videos (<xref ref-type="bibr" rid="B13">13</xref>). These innovations include extracting a 3D mesh of a person from the 2D RGB image, enhancing anatomical detail for face, body, and hand features. Notably, the current standards for the 3D mesh model, such as SMPL (Skinned Multi Person Linear) and SMPL-X, undergo continuous improvement and iterations depending on the desired use (<xref ref-type="bibr" rid="B20">20</xref>). When applied to videos, these SMPL models utilise a motion discriminator generative adversarial network (GAN), enabling a model that accurately represents human motion with assistance from the temporal domain (<xref ref-type="bibr" rid="B21">21</xref>, <xref ref-type="bibr" rid="B22">22</xref>).</p>
<p>While the aforementioned advancements in human pose estimation and marker-less motion capture have significantly enhanced our ability to capture intricate details of human motion, their application extends beyond sports science and general biomechanics. In particular, these technologies lay the foundation for a transformative approach in clinical settings, where the need for precise motion analysis holds significant implications. However, despite the strides made in accurately capturing human pose data, the clinical significance of such data for specific conditions, such as knee disorders, has been an under-explored territory (<xref ref-type="bibr" rid="B23">23</xref>, <xref ref-type="bibr" rid="B24">24</xref>).</p>
<p>Building on this gap in research, our methodology takes a crucial step in addressing the need for a comprehensive analysis of clinical motion data. By specifically tailoring our approach to assess motion data before and after treatment, we not only contribute to the development of effective intervention strategies but also provide a means for tracking rehabilitation progress and evaluating the progression of knee disorders. The integration of advanced human pose estimation and marker-less motion capture techniques, as demonstrated in the preceding paragraph, forms the cornerstone of our sophisticated and clinically relevant approach to capturing and interpreting human motion data.</p>
<p>In pursuit of these objectives, this study seeks to assuage concerns surrounding current methods employed in clinical environments. Our proposed approach leverages state-of-the-art marker-less motion capture systems combined with kinematics analysis, aiming to identify biomarkers and establish a robust framework for tracking disease progression or rehabilitation progress. Crafted to meet the unique demands of clinical settings&#x2014;fast, accessible, cost-effective, and portable&#x2014;our method incorporates manual feature calculation to provide explainable results to both patients and clinicians. Further enhancing interpretability, we employ Principal Component Analysis (PCA) to extract those features that have greatest power to discriminate between different conditions. The output from our proposed pipeline culminates in a medical report tailored for presentation to both clinicians and patients. To demonstrate the efficacy of our approach, we conclude with a small clinical case study, administering a local anaesthetic to a population with knee pain, resulting in the identification of novel motion-based biomarkers that are not only generalisable but also action-specific.</p>
</sec>
<sec id="s2"><label>2</label><title>Materials and method</title>
<p>The flow of data from collection to the extraction of clinically relevant and statistically significant biomechanics features is outlined in <xref ref-type="fig" rid="F2">Figure&#x00A0;2</xref>. The process begins with 1080p standard videos being recorded on an Azure Kinect RGB-D camera at 30 frames per second. The camera height was 1.2m and was placed at a distance of 3m away from the subject. Each video was recorded with the participant facing the camera; this reduces the effects of occluded joints on the motion capture technique. The participants performed at least 3 repeats each of a sit-to-stand and squat action; this protocol has been designed by clinicians to use simple actions that all participants can perform. Simultaneously, the diagnostic efficacy of the sit-to-stand and squat actions is substantiated by existing literature, thereby fortifying their diagnostic power (<xref ref-type="bibr" rid="B25">25</xref>&#x2013;<xref ref-type="bibr" rid="B27">27</xref>). For the squat, the participants were asked to squat as low as they could and then immediately return to a standing position. For the sit-to-stand, the participants were asked to stand up from a chair (use of arms was permitted due to safety concerns) and then return to a seated position.</p>
<fig id="F2" position="float"><label>Figure 2</label>
<caption><p>This represents the flow of the data from the source of the videos to the output of the statistical tests which allow the extraction of any significant data, this application has been applied to a clinical case study to examine the effectiveness of each technique when applied to intervention success.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g002.tif"/>
</fig>
<p>Inference of videos was performed using an Nvidia GTX 1080ti using a pre-trained checkpoint, where the average inference time is 40 seconds per 10 seconds of video.</p>
<sec id="s2a"><label>2.1</label><title>Clinical case study</title>
<p>To show the sensitivity of these methods in a clinical environment these techniques were performed on a small case study of 20 participants. The demographic for the study was kept broad to account for a variety of situations; the selection criteria were men and women over the age of 55 with a diagnosis of knee pain. It is important to note that it is almost impossible to isolate this to pain in a single limb; therefore, patients with bilateral knee pain were also recruited to the study as long as one side was found to be worse than the other. As age is a well-known risk factor for OA this increases the chance that their diagnosis is due to knee OA rather than an injury (<xref ref-type="bibr" rid="B28">28</xref>).</p>
<p>Each participant received a local anaesthetic injected into the knee with the diagnosed pain. This removes the psychological change to movement caused by pain, providing the biomechanics analysis with a clear before and after treatment and allowing us to assess the sensitivity of each capture method.</p>
<p>The study protocol was approved by the University of Lincoln Ethics, Governance &#x0026; Regulatory Compliance Committee, the study was performed following relevant institutional guidelines and regulations. All participants provided written informed consent before any data collection.</p>
</sec>
<sec id="s2b"><label>2.2</label><title>Human mesh recovery</title>
<p>The SMPL-X model used in this study consists of a total of 10,475 mesh vertices surrounding a skeleton of 54 joint locations. The model is defined by the function <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM3"><mml:mi>M</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>&#x03B8;</mml:mi><mml:mo>,</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:mo>,</mml:mo><mml:mi>&#x03C8;</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mspace width="thinmathspace" /><mml:mo>&#x003A;</mml:mo><mml:mspace width="thinmathspace" /><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="double-struck">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi>&#x03B8;</mml:mi><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi>&#x03B2;</mml:mi><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mo>&#x00D7;</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi>&#x03C8;</mml:mi><mml:mo fence="false" stretchy="false">|</mml:mo></mml:mrow></mml:msup><mml:mo stretchy="false">&#x2192;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="double-struck">R</mml:mi></mml:mrow></mml:mrow><mml:mn>3</mml:mn></mml:msup><mml:mi>N</mml:mi></mml:math></inline-formula>, where <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM4"><mml:mi>&#x03B8;</mml:mi></mml:math></inline-formula> represents the body pose, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM5"><mml:mi>&#x03C8;</mml:mi></mml:math></inline-formula> represents the facial parameters, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM6"><mml:mi>&#x03B2;</mml:mi></mml:math></inline-formula> represents the face and hand shape parameters, and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM7"><mml:mi>N</mml:mi></mml:math></inline-formula> denotes the number 
of frames in the video (<xref ref-type="bibr" rid="B20">20</xref>). This body model allows for inferences to be made from standard RGB images or image sequences, accounting for not only the pose of a subject but also the shape, providing more detail than a standard human pose estimation technique such as OpenPose or BlazePose (<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B29">29</xref>).</p>
<p>The standard RGB videos were fed into a mesh reconstruction pipeline based on the Video Inference for the Human Body Pose and Shape Estimation (VIBE) model, which predicts the SMPL parameters of a given participant based on monocular RGB video. The adoption of the VIBE model in our experiment was due to its ability to encode both spatial and temporal cues into the data using adversarial training in a deep neural network with a self-attention mechanism (<xref ref-type="bibr" rid="B21">21</xref>). To ensure reproducibility and optimal accuracy, the training and implementation details were implemented using the parameters described by Kocabas et al. (<xref ref-type="bibr" rid="B21">21</xref>), including sequence length=16, temporal encoder=2-layer GRU with a hidden size of 1,024 and learning rate of <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM8"><mml:mn>5</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:msup><mml:mn>10</mml:mn><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>5</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula> and an Adam optimiser, SMPL regressor=2 fully connected layers of size 1,024, motion discriminator=2-layer GRU with a hidden size of 1,024 and a learning rate of <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM9"><mml:mn>1</mml:mn><mml:mo>&#x00D7;</mml:mo><mml:msup><mml:mn>10</mml:mn><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>4</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula>, and the self attention=2 MLP layers of size 1,024 with <italic>tanh</italic> activation. The model was trained using InstaVariety (<xref ref-type="bibr" rid="B30">30</xref>) as the 2D ground-truth dataset, MPI-INF-3D (<xref ref-type="bibr" rid="B31">31</xref>) as the ground-truth 3D dataset, and 3DPW as the 3D ground-truth dataset for evaluation purposes; this training consisted of 30 epochs with 500 iterations per epoch and a batch size of 32 (<xref ref-type="bibr" rid="B32">32</xref>).</p>
</sec>
<sec id="s2c"><label>2.3</label><title>Whole body kinematic feature extraction</title>
<p>A visual representation of the joint angles used can be seen in <xref ref-type="fig" rid="F3">Figure&#x00A0;3</xref>; this shows the joint angles selected in both the sagittal and coronal planes. Each of the joint angles, <italic>&#x03B8;</italic>, was calculated using a base formula as shown in <xref ref-type="disp-formula" rid="disp-formula1">Equation 1</xref>, where <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM10"><mml:mi>k</mml:mi></mml:math></inline-formula>, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM11"><mml:mi>h</mml:mi></mml:math></inline-formula>, and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM12"><mml:mi>a</mml:mi></mml:math></inline-formula> represent the joint centres in a given plane of motion. These joint angles were then calculated at every frame in the video to produce joint angle sequences. This was then performed for each of the participant&#x2019;s repeats; this allows the extrapolation of a mean, minimum, and maximum curve by comparing the kinematics at the same frame in each repeat. The decision was also made not to time normalise the data under the assumption that any extreme changes to the length of an action could be caused by the participant&#x2019;s clinical pathology.</p>
<fig id="F3" position="float"><label>Figure 3</label>
<caption><p>Visual representation of the joint angles used in this study, showing both the sagittal and coronal joint angles and the location of the joint centres used in the calculations for each angle.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g003.tif"/>
</fig>
<p>For vectors:<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM13"><mml:mi>m</mml:mi><mml:mo>=</mml:mo><mml:mi>h</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>k</mml:mi></mml:math></inline-formula>, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM14"><mml:mi>n</mml:mi><mml:mo>=</mml:mo><mml:mi>a</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>k</mml:mi></mml:math></inline-formula>, and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM15"><mml:mi>p</mml:mi><mml:mo>=</mml:mo><mml:mi>h</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mi>a</mml:mi></mml:math></inline-formula><disp-formula id="disp-formula1"><label>(1)</label><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM1"><mml:mi>&#x03B8;</mml:mi><mml:mo>=</mml:mo><mml:msup><mml:mtext  mathvariant="italic">cos</mml:mtext><mml:mrow><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi mathvariant="italic">m</mml:mi><mml:msup><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mn mathvariant="italic">2</mml:mn></mml:msup><mml:mo mathvariant="italic">+</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi mathvariant="italic">n</mml:mi><mml:msup><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mn mathvariant="italic">2</mml:mn></mml:msup><mml:mo mathvariant="italic">&#x2212;</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi mathvariant="italic">p</mml:mi><mml:msup><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mn mathvariant="italic">2</mml:mn></mml:msup></mml:mrow><mml:mrow><mml:mn mathvariant="italic">2</mml:mn><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi mathvariant="italic">m</mml:mi><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mo fence="false" stretchy="false">|</mml:mo><mml:mi mathvariant="italic">n</mml:mi><mml:mo fence="false" 
stretchy="false">|</mml:mo></mml:mrow></mml:mfrac></mml:mrow><mml:mo>)</mml:mo></mml:mrow></mml:math></disp-formula>where <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM16"><mml:mi>k</mml:mi></mml:math></inline-formula>, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM17"><mml:mi>h</mml:mi></mml:math></inline-formula>, and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM18"><mml:mi>a</mml:mi></mml:math></inline-formula>, are the positions of the joint centres of any three given joints for example knee, hip, and ankle respectively can be used to calculate the knee angle and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM19"><mml:mo>&#x2223;</mml:mo></mml:math></inline-formula>.<inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM20"><mml:mo>&#x2223;</mml:mo></mml:math></inline-formula> denotes the Euclidean distance between two points.<disp-formula id="disp-formula2"><label>(2)</label><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM2"><mml:msub><mml:mi>&#x03C3;</mml:mi><mml:mi>m</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:msqrt><mml:mrow><mml:mfrac><mml:mrow><mml:munderover><mml:mo>&#x2211;</mml:mo><mml:mrow><mml:mi>i</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>n</mml:mi></mml:mrow></mml:munderover><mml:mo stretchy="false">(</mml:mo><mml:msub><mml:mi>m</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mrow><mml:mover><mml:mi>m</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow><mml:msup><mml:mo stretchy="false">)</mml:mo><mml:mn>2</mml:mn></mml:msup></mml:mrow><mml:mrow><mml:mi>n</mml:mi><mml:mo>&#x2212;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:mfrac></mml:mrow></mml:msqrt></mml:math></disp-formula>The smoothness of the mean, maximum, and minimum curves can then be calculated from the standard deviation of the gradient of a curve, as 
shown in <xref ref-type="disp-formula" rid="disp-formula2">Equation 2</xref> where <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM21"><mml:mi>n</mml:mi></mml:math></inline-formula> is the number of data points, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM22"><mml:msub><mml:mi>m</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:math></inline-formula> is the gradient of a slope at the <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM23"><mml:msup><mml:mi>i</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> point calculated as <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM24"><mml:msub><mml:mi>m</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:msub><mml:mi>y</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:mrow></mml:mfrac></mml:mrow></mml:math></inline-formula> and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM25"><mml:mrow><mml:mover><mml:mi>m</mml:mi><mml:mo stretchy="false">&#x00AF;</mml:mo></mml:mover></mml:mrow></mml:math></inline-formula> is the mean of the gradients. This smoothness value shows how much the slope of a curve varies from point to point, whereby a smoothness closer to 1 identifies a smoother curve. 
The rotational velocity and acceleration of the knee were calculated from the changes in knee angle over time (<xref ref-type="disp-formula" rid="disp-formula3">Equation 3</xref>). A simplified measure to represent the kinematics of the movement was defined as follows: the cumulative absolute rotational acceleration, <italic>J</italic>, was calculated as a representative measure of the explosiveness of the movement or the overall abruptness of the changes in angular velocity (<xref ref-type="disp-formula" rid="disp-formula4">Equation 4</xref>).</p>
<p>The rotational velocity <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM26"><mml:mi>&#x03C9;</mml:mi></mml:math></inline-formula> and acceleration <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM27"><mml:mi>&#x03B1;</mml:mi></mml:math></inline-formula> were calculated from the changes in the knee angle over time, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM28"><mml:mi>t</mml:mi></mml:math></inline-formula>:<disp-formula id="disp-formula3"><label>(3)</label><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="DM3"><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:mi>&#x03C9;</mml:mi></mml:mtd><mml:mtd><mml:mo>=</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>&#x03B8;</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mi>&#x03B1;</mml:mi></mml:mtd><mml:mtd><mml:mo>=</mml:mo><mml:mrow><mml:mfrac><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>&#x03C9;</mml:mi></mml:mrow><mml:mrow><mml:mi mathvariant="normal">&#x0394;</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:mfrac></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula><italic>J</italic> is the cumulative absolute rotational acceleration of an action at a single joint, <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM29"><mml:mo stretchy="false">&#x2223;</mml:mo><mml:mi>&#x03B1;</mml:mi><mml:mo stretchy="false">&#x2223;</mml:mo></mml:math></inline-formula> is the absolute instantaneous rotational acceleration and <italic>dt</italic> is the change in time.<disp-formula id="disp-formula4"><label>(4)</label><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" 
id="DM4"><mml:mi>J</mml:mi><mml:mo>=</mml:mo><mml:mo>&#x222B;</mml:mo><mml:mo>&#x2223;</mml:mo><mml:mi>&#x03B1;</mml:mi><mml:mo>&#x2223;</mml:mo><mml:mo>&#x22C5;</mml:mo><mml:mrow><mml:mi mathvariant="italic">d</mml:mi></mml:mrow><mml:mi>t</mml:mi></mml:math></disp-formula>After the mean, maximum, and minimum joint angles had been prepared for each participant&#x2019;s squat and sit-to-stand actions, this was fed into the PCA feature extraction component. This method of dimensionality reduction is used to rank the feature importance for each action, allowing only the most important features to be used in the subsequent methods (<xref ref-type="bibr" rid="B33">33</xref>). This method used a two-component PCA with singular value decomposition, thus creating a linear dimensionality reduction and accounting for both the actions used. As this was performed on each participant, this allows the identification of features that are representative of all participants in the trial. To achieve this, the features are ranked by importance in each action, then we analyse the frequency of these features appearing in the ten highest feature importance scores. This allows us to create a histogram to identify which features will be most representative of the whole participant group, as a higher frequency is due to this feature being ranked highly across most participants.</p>
</sec>
<sec id="s2d"><label>2.4</label><title>Statistical testing</title>
<p>Two-tailed paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM30"><mml:mi>t</mml:mi></mml:math></inline-formula>-tests were performed between the pre- and post-injection values for the cumulative acceleration and smoothness values at the joints identified by PCA as being most important in discriminating between the two conditions. A significance level of 0.05 was used. The pre- and post-injection conditions were compared for both the squat and sit-to-stand movements. In addition to the paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM31"><mml:mi>t</mml:mi></mml:math></inline-formula>-tests, Bland-Altman plots, with a range of limits of agreement of 1.96 and approximate confidence intervals as described by Bland and Altman (<xref ref-type="bibr" rid="B34">34</xref>), were created to assess the agreement between the pre- and post-injection cumulative acceleration and smoothness for the same selection of joints. For each pair of pre/post-injection observations, the difference between the scores was plotted against the mean of the two scores; the overall plot provides information about the level of variation and whether or not there was any systematic bias.</p>
</sec>
</sec>
<sec id="s3" sec-type="results"><label>3</label><title>Results</title>
<sec id="s3a"><label>3.1</label><title>Feature engineering and biomarker identification</title>
<p>Initially, the data created consisted solely of positions and orientations of joints in a 3D Cartesian coordinate system for both a squat and a sit-to-stand action, which first needs to be transformed and engineered into clinically relevant features, such as the knee flexion/extension, hip abduction/adduction, and the ankle flexion/extension. The dimensionality of this data was then reduced and separated into the squat and sit-to-stand actions using the PCA. Performing this PCA for each subject&#x2019;s actions finds the most representative features as a histogram shown in <xref ref-type="fig" rid="F4">Figures&#x00A0;4</xref>, <xref ref-type="fig" rid="F5">5</xref>; this was derived from the total counts of each of the top five most represented features of each participant and in each action performed. <xref ref-type="fig" rid="F4">Figure&#x00A0;4</xref>, for example, shows the most represented features among all patients in the squat action to be the mean and maximum knee flexion for both the left and right side. On the other hand, the sit-to-stand feature histogram as shown in <xref ref-type="fig" rid="F5">Figure&#x00A0;5</xref> shows the most representative features including both arm abduction and elbow flexion.</p>
<fig id="F4" position="float"><label>Figure 4</label>
<caption><p>The most common features among the most representative biomarkers during the squat action.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g004.tif"/>
</fig>
<fig id="F5" position="float"><label>Figure 5</label>
<caption><p>The most common features among the most representative biomarkers during the sit-to-stand action.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g005.tif"/>
</fig>
</sec>
<sec id="s3b"><label>3.2</label><title>Statistical testing</title>
<p><xref ref-type="table" rid="T1">Tables&#x00A0;1</xref>, <xref ref-type="table" rid="T2">2</xref> show the results of the paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM32"><mml:mi>t</mml:mi></mml:math></inline-formula>-test run on the cumulative acceleration and smoothness for the five most representative features, as provided by the PCA histograms. These tables highlight the <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM33"><mml:mi>t</mml:mi></mml:math></inline-formula>- and <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM34"><mml:mi>p</mml:mi></mml:math></inline-formula>-values for both biomarkers performed on each of the extracted kinematics. <xref ref-type="table" rid="T1">Table&#x00A0;1</xref> highlights that the smoothness of the maximum of both the left and right knee flexion during a squat has a statistically significant difference between the pre- and post-treatment measurements. However, <xref ref-type="table" rid="T2">Table&#x00A0;2</xref> highlights considerably more biomarkers with a statistically significant difference in the sit-to-stand; these include the smoothness of the maximum knee flexion on both sides, smoothness of the left arm abduction, and both the smoothness and cumulative acceleration of the maximum elbow flexion on both sides.</p>
<table-wrap id="T1" position="float"><label>Table 1</label>
<caption><p>Paired <italic>t</italic>-test results showing the <italic>t</italic> and <italic>p</italic> values, with the values in bold representing <italic>p</italic>&#x2009;&#x003C;&#x2009;0.05, for each of the most representative squat biomarkers.</p></caption>
<table frame="hsides" rules="groups">
<colgroup>
<col align="left"/>
<col align="center"/>
<col align="center"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Biomarker (Squat)</th>
<th valign="top" align="center">Cumulative acceleration <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM35"><mml:mi>t</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></th>
<th valign="top" align="center">Smoothness <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM36"><mml:mi>t</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Right Knee Flexion (mean)</td>
<td valign="top" align="center">1.786 (0.090)</td>
<td valign="top" align="center">1.203 (0.244)</td>
</tr>
<tr>
<td valign="top" align="left">Right Knee Flexion (max)</td>
<td valign="top" align="center">0.126 (0.901)</td>
<td valign="top" align="center">2.324 <bold>(0.031)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Left Knee Flexion (mean)</td>
<td valign="top" align="center">1.907 (0.072)</td>
<td valign="top" align="center">1.196 (0.246)</td>
</tr>
<tr>
<td valign="top" align="left">Left Knee Flexion (max)</td>
<td valign="top" align="center">0.483 (0.635)</td>
<td valign="top" align="center">2.528 <bold>(0.021)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Right Knee Flexion (min)</td>
<td valign="top" align="center">0.203 (0.841)</td>
<td valign="top" align="center">1.385 (0.182)</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="table-fn1"><p>The bold values highlight the statistically significant values (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05).</p></fn>
</table-wrap-foot>
</table-wrap>
<table-wrap id="T2" position="float"><label>Table 2</label>
<caption><p>Paired <italic>t</italic>-test results showing the <italic>t</italic> and <italic>p</italic> values, with the values in bold representing <italic>p</italic>&#x2009;&#x003C;&#x2009;0.05, for each of the most representative sit-to-stand biomarkers.</p></caption>
<table frame="hsides" rules="groups">
<colgroup>
<col align="left"/>
<col align="center"/>
<col align="center"/>
</colgroup>
<thead>
<tr>
<th valign="top" align="left">Biomarker (Sit-to-Stand)</th>
<th valign="top" align="center">Cumulative acceleration <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM37"><mml:mi>t</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></th>
<th valign="top" align="center">Smoothness <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM38"><mml:mi>t</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>p</mml:mi><mml:mo stretchy="false">)</mml:mo></mml:math></inline-formula></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Left Knee Flexion (max)</td>
<td valign="top" align="center">0.772 (0.450)</td>
<td valign="top" align="center">2.976 <bold>(0.008)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Right Knee Flexion (max)</td>
<td valign="top" align="center">0.558 (0.584)</td>
<td valign="top" align="center">2.401 <bold>(0.027)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Left Arm Abduction (mean)</td>
<td valign="top" align="center">0.675 (0.508)</td>
<td valign="top" align="center">3.586 <bold>(0.002)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Right Elbow Flexion (max)</td>
<td valign="top" align="center">2.451 <bold>(0.024)</bold></td>
<td valign="top" align="center">3.592 <bold>(0.002)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Left Elbow Flexion (max)</td>
<td valign="top" align="center">2.364 <bold>(0.029)</bold></td>
<td valign="top" align="center">2.604 <bold>(0.017)</bold></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="table-fn2"><p>The bold values highlight the statistically significant values (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05).</p></fn>
</table-wrap-foot>
</table-wrap>
<p>Additionally, the subsequent Bland-Altman plots shown in <xref ref-type="fig" rid="F6">Figures&#x00A0;6</xref>, <xref ref-type="fig" rid="F7">7</xref> show that both the squat and sit-to-stand actions have a reasonable variability with most of the points falling within the two confidence intervals. The sit-to-stand action in <xref ref-type="fig" rid="F6">Figure&#x00A0;6</xref> shows a reduced variability with a consistent spread in the mean of the differences as the mean of the methods increases. However, the squat action as shown in <xref ref-type="fig" rid="F7">Figure&#x00A0;7</xref> has an increase in the variability which can be seen by the larger increase in the mean of the differences as the mean of the methods increases.</p>
<fig id="F6" position="float"><label>Figure 6</label>
<caption><p>Bland-Altman plot showing the difference against the mean for each patient (for the smoothness of the maximum right knee flexion) during the sit-to-stand action. The variation around the mean shows the apparent differences before and after the injection.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g006.tif"/>
</fig>
<fig id="F7" position="float"><label>Figure 7</label>
<caption><p>Bland-Altman plot showing the difference against the mean for each patient (for the smoothness of the maximum right knee flexion) during the squat action. The variation around the mean shows the apparent differences before and after the injection.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g007.tif"/>
</fig>
</sec>
<sec id="s3c"><label>3.3</label><title>Clinical trial results</title>
<p>The individual results from the clinical trial can be seen in <xref ref-type="fig" rid="F8">Figures&#x00A0;8</xref>, <xref ref-type="fig" rid="F9">9</xref>, showing the change between the pre- and post-injection data for each of the statistically significant biomarkers identified in <xref ref-type="table" rid="T1">Tables&#x00A0;1</xref>, <xref ref-type="table" rid="T2">2</xref> for both the squat and sit-to-stand actions. These two figures show both the box and whisker plots to show the change for the entire group, as well as the scattered points to show the exact change for each individual. These results show that each of the biomarkers saw a median increase between the pre- and post-injection; however, there were some outliers in the data where the change was greater than expected or the value decreased after the treatment.</p>
<fig id="F8" position="float"><label>Figure 8</label>
<caption><p>Box plot showing the median as well as the first and third quartile for percentage change for the entire clinical trial population from pre- to post-injection for the squat action projected on a logarithmic scale, showing the biomarkers identified to be statistically significant from the paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM39"><mml:mi>t</mml:mi></mml:math></inline-formula>-tests.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g008.tif"/>
</fig>
<fig id="F9" position="float"><label>Figure 9</label>
<caption><p>Box plot showing the median as well as the first and third quartile for percentage change for the entire clinical trial population from pre- to post-injection for the sit-to-stand action projected on a logarithmic scale, showing the biomarkers identified to be statistically significant from the paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM40"><mml:mi>t</mml:mi></mml:math></inline-formula>-tests.</p></caption>
<graphic xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="fdgth-06-1324511-g009.tif"/>
</fig>
</sec>
</sec>
<sec id="s4" sec-type="discussion"><label>4</label><title>Discussion</title>
<p>In examining the PCA results depicted in <xref ref-type="fig" rid="F4">Figures&#x00A0;4</xref>, <xref ref-type="fig" rid="F5">5</xref>, a pivotal revelation surfaces regarding the extraction of biomarkers from motion data. These histograms serve as a visual representation of the most significant features associated with each action. Notably, these identified features stand out as crucial candidates for biomarkers, showcasing their importance within the specific actions analysed. This finding holds profound implications, suggesting the potential utility of these biomarkers in diagnosing knee disorders and tracking disease or rehabilitation progression (<xref ref-type="bibr" rid="B35">35</xref>).</p>
<p>Paired statistical tests, differentiating pre-treatment and post-treatment, offer valuable insights into outcomes and methodological significance. <xref ref-type="fig" rid="F6">Figures&#x00A0;6</xref>, <xref ref-type="fig" rid="F7">7</xref> highlight crucial observations on the efficacy of the data collection technique. Results suggest the method establishes a sensitive coordinate system, detecting changes induced by treatment. Supported by participants receiving local anaesthetic, post-injection movements, devoid of knee pain, align more closely with physiological capabilities (<xref ref-type="bibr" rid="B36">36</xref>, <xref ref-type="bibr" rid="B37">37</xref>). This nuanced understanding underscores the method&#x2019;s importance in robustly assessing biomechanical changes post-treatment. However, Bland-Altman plots in <xref ref-type="fig" rid="F6">Figures&#x00A0;6</xref>, <xref ref-type="fig" rid="F7">7</xref> reveal areas for methodology improvement. While most participants show limited point spread, particularly in the squat action, outliers suggest selected biomarkers, though effective for many, may lack universal applicability. Consideration must be given to potential measurement error tied to treatment effectiveness variability rather than data collection or biomarker efficacy issues. Further investigation is crucial to pinpoint the source of this error, aligning with our overarching goal of precision in clinical biomechanics assessments. Furthermore, the effectiveness of these techniques in identifying biomarkers for knee disorders is evident in <xref ref-type="fig" rid="F8">Figures&#x00A0;8</xref>, <xref ref-type="fig" rid="F9">9</xref>. Each biomarker displays a median percentage change increase, suggesting potential for monitoring movement capabilities. However, addressing anomalies in results, stemming from individualistic data, is crucial. 
Variations arise from the assumption of consistent movement patterns, not always holding true due to external factors like footwear, as demonstrated in prior research (<xref ref-type="bibr" rid="B38">38</xref>). Acknowledging and dissecting these intricacies are crucial steps in refining the application of these techniques for clinical biomechanics assessments, aligning with our overarching goals. An essential consideration in evaluating a technique&#x2019;s diagnostic capabilities is the impact of measurement error. Nakano et al. report a mean absolute error in joint center location ranging from 20mm to 40mm, emphasising the challenge (<xref ref-type="bibr" rid="B39">39</xref>). Comparing with marker-based motion capture, a 50mm marker registration uncertainty results in a 7<sup>&#x2218;</sup> peak joint angle variability (<xref ref-type="bibr" rid="B40">40</xref>). In contrast, inertial measurement units (IMUs) have up to 11.4<sup>&#x2218;</sup> measurement error (<xref ref-type="bibr" rid="B41">41</xref>). This comparative analysis underscores the nuanced landscape of measurement errors, emphasising the need for a meticulous approach to enhance precision in clinical biomechanics assessments, a central objective of our study.</p>
<p>In addressing the challenge of measurement error, our approach is grounded in the assumption of non-differential measurement error. This decision is guided by our meticulous standardisation of variables for participants&#x2019; pre- and post-measurements. Every aspect, including the timing of measurements spaced 15-30 minutes apart on the same day, attire, lighting conditions, and the recording device utilising the same camera, is held consistent. Through this comprehensive standardisation, we maintain a uniform measurement error between both sets of measurements. While acknowledging potential effects on diagnostic accuracy, especially in comparisons between distinct groups, our primary focus centers on comparing the same individual before and after a specific treatment (<xref ref-type="bibr" rid="B42">42</xref>). This deliberate approach aligns seamlessly with our overarching goals, emphasising the crucial need for precision in clinical biomechanics assessments and contributing to the ongoing discourse on measurement error considerations.</p>
<p>The significance of these findings lies in the adaptability of the proposed methods for feature engineering across diverse applications. These techniques, showcased in a relatively small knee-based case study, carry implications that extend beyond the specific context. Their versatility allows seamless adaptation to various applications, addressing disorders not only in different body parts but also encompassing broader movement issues such as those associated with neurological disorders. It&#x2019;s noteworthy that the actions performed in our study, though centered on the knee, can be tailored to suit the requirements of different applications. This adaptability underscores the broader potential of our techniques, aligning with our overarching goal of establishing a flexible and widely applicable framework for clinical biomechanics assessments.</p>
<p>A significant observation stems from the success of our feature extraction methodology, where each tested feature underwent initial extraction using PCA. Though further reduced for the paired <inline-formula><mml:math xmlns:mml="http://www.w3.org/1998/Math/MathML" id="IM43"><mml:mi>t</mml:mi></mml:math></inline-formula>-test due to time series data constraints, these features provided descriptions based on both smoothness and cumulative acceleration from joint kinematics. Transforming these biomarker descriptions, as presented in <xref ref-type="table" rid="T1">Tables&#x00A0;1</xref>, <xref ref-type="table" rid="T2">2</xref>, underscores their statistical significance. Beyond distinguishing actions from movement, these PCA-extracted biomarkers prove valuable in gauging treatment success (<xref ref-type="bibr" rid="B43">43</xref>, <xref ref-type="bibr" rid="B44">44</xref>). This aligns with our broader goals, enhancing the potential for using motion-based biomarkers in clinical assessments and advancing our approach to clinical biomechanics. By synthesising the presented results, the described methods emerge as promising tools for clinical applications. These techniques successfully fulfill their objectives, providing a cost-effective solution for clinical biomechanics assessments. Importantly, their versatility extends beyond the confines of lower limb assessments, offering a wide range of potential applications. Beyond their diagnostic capabilities, this solution holds the potential to identify novel biomarkers associated with a diverse array of movement-debilitating conditions, including injuries, illnesses, and disorders. This multi-faceted approach aligns with our overarching goals of revolutionising clinical biomechanics, creating a scalable and adaptable framework with far-reaching implications for the field.</p>
<p>This study has played a crucial role in identifying the current limitations inherent in the described techniques. Notably, the absence of a controlled environment can introduce challenges, leading to occlusion and jitter problems, which necessitate attention in future research endeavors. Moreover, the limitations associated with the design of clinical trials come to the forefront. Establishing a truly representative sample proves challenging, and in this case, determining the prevalence of right-sided biomechanics raises intricate questions. It remains uncertain whether this prevalence is a result of the sample population being right-dominant or if patients experienced bilateral pain, thereby challenging the assumption of an equal distribution of pain in each knee. Recognising and addressing these limitations are vital steps in refining our methodologies, ensuring more accurate and comprehensive clinical biomechanics assessments aligned with our overarching goals.</p>
<p>In light of our current findings, we have identified promising avenues for future research and comparative studies, aimed at delving deeper into the capabilities of the proposed techniques. One such initiative involves a direct comparison between SMPL-based single RGB camera methods, MoCap, and IMUs. This comparative analysis seeks to elucidate the variability inherent in these different methods, providing valuable insights into their respective strengths and limitations. Additionally, we aim to extend the applicability of our motion-based biomarkers by introducing new disease pathologies to the dataset. This strategic expansion aims to assess the effectiveness of these biomarkers in distinguishing not only between diseases but also in discerning normal knee conditions. These future investigations align with our overarching goal of refining and broadening the scope of clinical biomechanics assessments, paving the way for advancements in remote monitoring and intervention strategies.</p>
<p>In conclusion, this study has not only demonstrated the effectiveness of utilising motion-based biomarkers for quantifying movement but has also established a robust foundation for conducting objective MSK analyses. The techniques presented offer a promising avenue for implementing a standardised method of MSK analysis, achievable with any standard camera, even a mobile phone. This accessibility opens doors for remote disease monitoring, enabling the early identification of pre-disease stages. By facilitating timely interventions, these methods have the potential to significantly alleviate the burden on the healthcare industry. In essence, our study contributes to the broader goal of revolutionising clinical biomechanics assessments, providing a low-cost solution with far-reaching applications beyond lower limb assessments, and paving the way for advancements in remote healthcare monitoring and intervention strategies.</p>
</sec>
</body>
<back>
<sec id="s5" sec-type="data-availability"><title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s6" sec-type="ethics-statement"><title>Ethics statement</title>
<p>The studies involving humans were approved by Research Ethics &#x0026; Governance - University of Lincoln (<ext-link ext-link-type="uri" xlink:href="mailto:ethics@lincoln.ac.uk">ethics@lincoln.ac.uk</ext-link>). The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study. Written informed consent was obtained from the individual(s) for the publication of any potentially identifiable images or data included in this article.</p>
</sec>
<sec id="s7" sec-type="author-contributions"><title>Author contributions</title>
<p>KA: Conceptualisation, Data curation, Formal Analysis, Investigation, Methodology, Validation, Visualisation, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing, Software; LZ: Conceptualisation, Project administration, Resources, Supervision, Writing &#x2013; review &#x0026; editing, Funding acquisition, Investigation; YW: Conceptualisation, Investigation, Resources, Software, Writing &#x2013; review &#x0026; editing; APW: Conceptualisation, Formal Analysis, Methodology, Validation, Writing &#x2013; review &#x0026; editing; PL: Conceptualisation, Data curation, Funding acquisition, Resources, Supervision, Validation, Writing &#x2013; review &#x0026; editing; XY: Funding acquisition, Project administration, Resources, Supervision, Writing &#x2013; review &#x0026; editing.</p>
</sec>
<sec id="s8" sec-type="funding-information"><title>Funding</title>
<p>The author(s) declare financial support was received for the research, authorship, and/or publication of this article.</p>
<p>This work was supported by the EPSRC Doctoral Training Partnership.</p>
</sec>
<ack><title>Acknowledgments</title>
<p>All participants were recruited by MSK Doctors. All figures and tables used in this article were created by the authors unless specified in the figure legend. In addition, this research would not be possible without the developments at Max-Planck-Gesellschaft on the SMPL-X and VIBE models.</p>
</ack>
<sec id="s9" sec-type="COI-statement"><title>Conflict of interest</title>
<p>PL is employed by MSK Doctors.</p>
<p>The remaining authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer"><title>Publisher&#x0027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organisations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list><title>References</title>
<ref id="B1"><label>1.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lespasio</surname><given-names>M</given-names></name></person-group>. <article-title>Knee osteoarthritis: A primer</article-title>. <source>Perm J</source>. (<year>2018</year>) 21:<fpage>1</fpage>&#x2013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.7812/TPP/17-084</pub-id></citation></ref>
<ref id="B2"><label>2.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yucesoy</surname><given-names>B</given-names></name><name><surname>Charles</surname><given-names>LE</given-names></name><name><surname>Baker</surname><given-names>B</given-names></name><name><surname>Burchfiel</surname><given-names>CM</given-names></name></person-group>. <article-title>Occupational, genetic risk factors for osteoarthritis: A review</article-title>. <source>Work</source>. (<year>2015</year>) <volume>50</volume>:<fpage>261</fpage>&#x2013;<lpage>73</lpage>. <pub-id pub-id-type="doi">10.3233/wor-131739</pub-id><pub-id pub-id-type="pmid">24004806</pub-id></citation></ref>
<ref id="B3"><label>3.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Price</surname><given-names>AJ</given-names></name><name><surname>Alvand</surname><given-names>A</given-names></name><name><surname>Troelsen</surname><given-names>A</given-names></name><name><surname>Katz</surname><given-names>JN</given-names></name><name><surname>Hooper</surname><given-names>G</given-names></name><name><surname>Gray</surname><given-names>A</given-names></name></person-group>, et al. <article-title>Knee replacement</article-title>. <source>Lancet</source>. (<year>2018</year>) <volume>392</volume>:<fpage>1672</fpage>&#x2013;<lpage>82</lpage>. <pub-id pub-id-type="doi">10.1016/S0140-6736(18)32344-4</pub-id><pub-id pub-id-type="pmid">30496082</pub-id></citation></ref>
<ref id="B4"><label>4.</label><citation citation-type="other"><collab>[Dataset] NHS Digital</collab>. <comment>Finalised patient reported outcome measures (PROMs) in England for hip &#x0026; knee replacements, April 2018&#x2013;March 2019 (2020)</comment>.</citation></ref>
<ref id="B5"><label>5.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jenkins</surname><given-names>PJ</given-names></name><name><surname>Clement</surname><given-names>ND</given-names></name><name><surname>Hamilton</surname><given-names>DF</given-names></name><name><surname>Gaston</surname><given-names>P</given-names></name><name><surname>Patton</surname><given-names>JT</given-names></name><name><surname>Howie</surname><given-names>CR</given-names></name></person-group>. <article-title>Predicting the cost-effectiveness of total hip, knee replacement</article-title>. <source>Bone Joint J</source>. (<year>2013</year>) <volume>95-B</volume>:<fpage>115</fpage>&#x2013;<lpage>21</lpage>. <pub-id pub-id-type="doi">10.1302/0301-620X.95B1.29835</pub-id><pub-id pub-id-type="pmid">23307684</pub-id></citation></ref>
<ref id="B6"><label>6.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sparkes</surname><given-names>V</given-names></name><name><surname>Whatling</surname><given-names>GM</given-names></name><name><surname>Biggs</surname><given-names>P</given-names></name><name><surname>Khatib</surname><given-names>N</given-names></name><name><surname>Al-Amri</surname><given-names>M</given-names></name><name><surname>Williams</surname><given-names>D</given-names></name></person-group>, et al. <article-title>Comparison of gait, functional activities, and patient-reported outcome measures in patients with knee osteoarthritis and healthy adults using 3d motion analysis and activity monitoring: An exploratory case-control analysis</article-title>. <source>Orthop Res Rev</source>. (<year>2019</year>) 11:<fpage>129</fpage>&#x2013;<lpage>40</lpage>.<pub-id pub-id-type="pmid">31572022</pub-id></citation></ref>
<ref id="B7"><label>7.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Duffell</surname><given-names>LD</given-names></name><name><surname>Jordan</surname><given-names>SJ</given-names></name><name><surname>Cobb</surname><given-names>JP</given-names></name><name><surname>McGregor</surname><given-names>AH</given-names></name></person-group>. <article-title>Gait adaptations with aging in healthy participants and people with knee-joint osteoarthritis</article-title>. <source>Gait Posture</source>. (<year>2017</year>) <volume>57</volume>:<fpage>246</fpage>&#x2013;<lpage>51</lpage>. <pub-id pub-id-type="doi">10.1016/j.gaitpost.2017.06.015</pub-id><pub-id pub-id-type="pmid">28672154</pub-id></citation></ref>
<ref id="B8"><label>8.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Robbins</surname><given-names>S</given-names></name><name><surname>Pelletier</surname><given-names>JP</given-names></name><name><surname>Abram</surname><given-names>F</given-names></name><name><surname>Boily</surname><given-names>M</given-names></name><name><surname>Antoniou</surname><given-names>J</given-names></name><name><surname>Martineau</surname><given-names>P</given-names></name><etal/></person-group>, <article-title>Gait risk factors for disease progression differ between non-traumatic and post-traumatic knee osteoarthritis</article-title>. <source>Osteoarthritis Cartilage</source>. (<year>2021</year>) <volume>29</volume>:<fpage>1487</fpage>&#x2013;<lpage>97</lpage>. <pub-id pub-id-type="doi">10.1016/j.joca.2021.07.014</pub-id><pub-id pub-id-type="pmid">34348184</pub-id></citation></ref>
<ref id="B9"><label>9.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ornetti</surname><given-names>P</given-names></name><name><surname>Maillefert</surname><given-names>JF</given-names></name><name><surname>Laroche</surname><given-names>D</given-names></name><name><surname>Morisset</surname><given-names>C</given-names></name><name><surname>Dougados</surname><given-names>M</given-names></name><name><surname>Gossec</surname><given-names>L</given-names></name></person-group>. <article-title>Gait analysis as a quantifiable outcome measure in hip or knee osteoarthritis: A systematic review</article-title>. <source>Joint Bone Spine</source>. (<year>2010</year>) <volume>77</volume>:<fpage>421</fpage>&#x2013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1016/j.jbspin.2009.12.009</pub-id><pub-id pub-id-type="pmid">20471899</pub-id></citation></ref>
<ref id="B10"><label>10.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Menolotto</surname><given-names>M</given-names></name><name><surname>Komaris</surname><given-names>DS</given-names></name><name><surname>Tedesco</surname><given-names>S</given-names></name><name><surname>O&#x2019;flynn</surname><given-names>B</given-names></name><name><surname>Walsh</surname><given-names>M</given-names></name></person-group>. <article-title>Motion capture technology in industrial applications: A systematic review</article-title>. <source>Sensors (Switzerland)</source>. (<year>2020</year>) <volume>20</volume>:<fpage>1</fpage>&#x2013;<lpage>25</lpage>. <pub-id pub-id-type="doi">10.3390/s20195687</pub-id></citation></ref>
<ref id="B11"><label>11.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Majeed Alsaadi</surname><given-names>I</given-names></name></person-group>. <article-title>Study on most popular behavioral biometrics, advantages, disadvantages and recent applications: A review</article-title>. <source>Int J Sci Technol Res</source>. (<year>2021</year>) <volume>10</volume>:<fpage>1</fpage>. <pub-id pub-id-type="doi">10.13140/RG.2.2.28802.09926</pub-id></citation></ref>
<ref id="B12"><label>12.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Desmarais</surname><given-names>Y</given-names></name><name><surname>Mottet</surname><given-names>D</given-names></name><name><surname>Slangen</surname><given-names>P</given-names></name><name><surname>Montesinos</surname><given-names>P</given-names></name></person-group>. <article-title>A review of 3D human pose estimation algorithms for markerless motion capture</article-title>. <source>Comput Vis Image Underst</source>. (<year>2021</year>) <volume>212</volume>:<fpage>103275</fpage>. <pub-id pub-id-type="doi">10.1016/j.cviu.2021.103275</pub-id></citation></ref>
<ref id="B13"><label>13.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cao</surname><given-names>Z</given-names></name><name><surname>Hidalgo Martinez</surname><given-names>G</given-names></name><name><surname>Simon</surname><given-names>T</given-names></name><name><surname>Wei</surname><given-names>S</given-names></name><name><surname>Sheikh</surname><given-names>YA</given-names></name></person-group>. <article-title>Openpose: Realtime multi-person 2D pose estimation using part affinity fields</article-title>. <source>IEEE Trans Pattern Anal Mach Intell</source>. (<year>2019</year>) 43:172&#x2013;86.</citation></ref>
<ref id="B14"><label>14.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cheng</surname><given-names>Z</given-names></name><name><surname>Ligouri</surname><given-names>A</given-names></name><name><surname>Fogle</surname><given-names>R</given-names></name><name><surname>Webb</surname><given-names>T</given-names></name></person-group>. <article-title>Capturing human motion in natural environments</article-title>. <source>Procedia Manuf</source>. (<year>2015</year>) <volume>3</volume>:<fpage>3828</fpage>&#x2013;<lpage>35</lpage>. <pub-id pub-id-type="doi">10.1016/j.promfg.2015.07.886</pub-id>. <comment>6th International Conference on Applied Human Factors, Ergonomics (AHFE 2015), the Affiliated Conferences, AHFE 2015</comment></citation></ref>
<ref id="B15"><label>15.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Matsumoto</surname><given-names>T</given-names></name><name><surname>Shimosato</surname><given-names>K</given-names></name><name><surname>Maeda</surname><given-names>T</given-names></name><name><surname>Murakami</surname><given-names>T</given-names></name><name><surname>Murakoso</surname><given-names>K</given-names></name><name><surname>Mino</surname><given-names>K</given-names></name><etal/></person-group>, <article-title>Human pose annotation using a motion capture system for loose-fitting clothes</article-title>. <source>IEICE Trans Inf Syst</source>. (<year>2020</year>) <volume>E103D</volume>:<fpage>1257</fpage>&#x2013;<lpage>64</lpage>. <pub-id pub-id-type="doi">10.1587/transinf.2019MVP0007</pub-id></citation></ref>
<ref id="B16"><label>16.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nouei</surname><given-names>MT</given-names></name><name><surname>Kamyad</surname><given-names>AV</given-names></name><name><surname>Soroush</surname><given-names>AR</given-names></name><name><surname>Ghazalbash</surname><given-names>S</given-names></name></person-group>. <article-title>A comprehensive operating room information system using the Kinect sensors and RFID</article-title>. <source>J Clin Monit Comput</source>. (<year>2015</year>) <volume>29</volume>:<fpage>251</fpage>&#x2013;<lpage>61</lpage>. <pub-id pub-id-type="doi">10.1007/s10877-014-9591-5</pub-id><pub-id pub-id-type="pmid">25017016</pub-id></citation></ref>
<ref id="B17"><label>17.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Cao</surname><given-names>Z</given-names></name><name><surname>Simon</surname><given-names>T</given-names></name><name><surname>Wei</surname><given-names>SE</given-names></name><name><surname>Sheikh</surname><given-names>Y</given-names></name></person-group>. <comment>Realtime multi-person 2D pose estimation using part affinity fields. In: <italic>Proceedings - 30th IEEE Conference on Computer Vision and Pattern Recognition, CVPR 2017</italic> (2017). p. 1302&#x2013;10. doi:10.1109/CVPR.2017.143</comment></citation></ref>
<ref id="B18"><label>18.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Toshev</surname><given-names>A</given-names></name><name><surname>Szegedy</surname><given-names>C</given-names></name></person-group>. <comment>Deeppose: Human pose estimation via deep neural networks (2014)</comment>.</citation></ref>
<ref id="B19"><label>19.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stenum</surname><given-names>J</given-names></name><name><surname>Cherry-Allen</surname><given-names>KM</given-names></name><name><surname>Pyles</surname><given-names>CO</given-names></name><name><surname>Reetzke</surname><given-names>RD</given-names></name><name><surname>Vignos</surname><given-names>MF</given-names></name><name><surname>Roemmich</surname><given-names>RT</given-names></name></person-group>. <article-title>Applications of pose estimation in human health and performance across the lifespan</article-title>. <source>Sensors</source>. (<year>2021</year>) <volume>21</volume>:<fpage>7315</fpage>. <pub-id pub-id-type="doi">10.3390/s21217315</pub-id><pub-id pub-id-type="pmid">34770620</pub-id></citation></ref>
<ref id="B20"><label>20.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Pavlakos</surname><given-names>G</given-names></name><name><surname>Choutas</surname><given-names>V</given-names></name><name><surname>Ghorbani</surname><given-names>N</given-names></name><name><surname>Bolkart</surname><given-names>T</given-names></name><name><surname>Osman</surname><given-names>AA</given-names></name><name><surname>Tzionas</surname><given-names>D</given-names></name><etal/></person-group>, <comment>Expressive body capture: 3D hands, face, and body from a single image. In: <italic>Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition</italic> (2019). p. 10967&#x2013;77. doi:10.1109/CVPR.2019.01123</comment></citation></ref>
<ref id="B21"><label>21.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Kocabas</surname><given-names>M</given-names></name><name><surname>Athanasiou</surname><given-names>N</given-names></name><name><surname>Black</surname><given-names>MJ</given-names></name></person-group>. <comment>Vibe: Video inference for human body pose and shape estimation. In: <italic>Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition</italic> (2020). p. 5252&#x2013;62. doi:10.1109/CVPR42600.2020.00530</comment></citation></ref>
<ref id="B22"><label>22.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Choi</surname><given-names>H</given-names></name><name><surname>Moon</surname><given-names>G</given-names></name><name><surname>Chang</surname><given-names>JY</given-names></name><name><surname>Lee</surname><given-names>KM</given-names></name></person-group>. <comment>Beyond static features for temporally consistent 3D human pose and shape from a video (2020). doi:10.1109/cvpr46437.2021.00200</comment></citation></ref>
<ref id="B23"><label>23.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Albert</surname><given-names>JA</given-names></name><name><surname>Owolabi</surname><given-names>V</given-names></name><name><surname>Gebel</surname><given-names>A</given-names></name><name><surname>Brahms</surname><given-names>CM</given-names></name><name><surname>Granacher</surname><given-names>U</given-names></name><name><surname>Arnrich</surname><given-names>B</given-names></name></person-group>. <article-title>Evaluation of the pose tracking performance of the azure kinect and kinect v2 for gait analysis in comparison with a gold standard: A pilot study</article-title>. <source>Sensors</source>. (<year>2020</year>) <volume>20</volume>:<fpage>5104</fpage>. <pub-id pub-id-type="doi">10.3390/s20185104</pub-id><pub-id pub-id-type="pmid">32911651</pub-id></citation></ref>
<ref id="B24"><label>24.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Guess</surname><given-names>TM</given-names></name><name><surname>Bliss</surname><given-names>R</given-names></name><name><surname>Hall</surname><given-names>JB</given-names></name><name><surname>Kiselica</surname><given-names>AM</given-names></name></person-group>. <article-title>Comparison of azure kinect overground gait spatiotemporal parameters to marker based optical motion capture</article-title>. <source>Gait Posture</source>. (<year>2022</year>) <volume>96</volume>:<fpage>130</fpage>&#x2013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1016/j.gaitpost.2022.05.021</pub-id><pub-id pub-id-type="pmid">35635988</pub-id></citation></ref>
<ref id="B25"><label>25.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Metcalfe</surname><given-names>D</given-names></name><name><surname>Perry</surname><given-names>DC</given-names></name><name><surname>Claireaux</surname><given-names>HA</given-names></name><name><surname>Simel</surname><given-names>DL</given-names></name><name><surname>Zogg</surname><given-names>CK</given-names></name><name><surname>Costa</surname><given-names>ML</given-names></name></person-group>. <article-title>Does this patient have hip osteoarthritis?: The rational clinical examination systematic review</article-title>. <source>JAMA</source>. (<year>2019</year>) <volume>322</volume>:<fpage>2323</fpage>&#x2013;<lpage>33</lpage>. <pub-id pub-id-type="doi">10.1001/jama.2019.19413</pub-id><pub-id pub-id-type="pmid">31846019</pub-id></citation></ref>
<ref id="B26"><label>26.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Anan</surname><given-names>M</given-names></name><name><surname>Shinkoda</surname><given-names>K</given-names></name><name><surname>Suzuki</surname><given-names>K</given-names></name><name><surname>Yagi</surname><given-names>M</given-names></name><name><surname>Ibara</surname><given-names>T</given-names></name><name><surname>Kito</surname><given-names>N</given-names></name></person-group>. <article-title>Do patients with knee osteoarthritis perform sit-to-stand motion efficiently?</article-title>. <source>Gait Posture</source>. (<year>2015</year>) <volume>41</volume>:<fpage>488</fpage>&#x2013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1016/j.gaitpost.2014.11.015</pub-id><pub-id pub-id-type="pmid">25530114</pub-id></citation></ref>
<ref id="B27"><label>27.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Turcot</surname><given-names>K</given-names></name><name><surname>Armand</surname><given-names>S</given-names></name><name><surname>Fritschy</surname><given-names>D</given-names></name><name><surname>Hoffmeyer</surname><given-names>P</given-names></name><name><surname>Suv&#x00E0;</surname><given-names>D</given-names></name></person-group>. <article-title>Sit-to-stand alterations in advanced knee osteoarthritis</article-title>. <source>Gait Posture</source>. (<year>2012</year>) <volume>36</volume>:<fpage>68</fpage>&#x2013;<lpage>72</lpage>. <pub-id pub-id-type="doi">10.1016/j.gaitpost.2012.01.005</pub-id><pub-id pub-id-type="pmid">22326239</pub-id></citation></ref>
<ref id="B28"><label>28.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mobasheri</surname><given-names>A</given-names></name><name><surname>Henrotin</surname><given-names>Y</given-names></name></person-group>. <article-title>Biomarkers of (osteo)arthritis</article-title>. <source>Biomarkers</source>. (<year>2015</year>) <volume>20</volume>:<fpage>513</fpage>&#x2013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.3109/1354750X.2016.1140930</pub-id><pub-id pub-id-type="pmid">26954784</pub-id></citation></ref>
<ref id="B29"><label>29.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Bazarevsky</surname><given-names>V</given-names></name><name><surname>Grishchenko</surname><given-names>I</given-names></name><name><surname>Raveendran</surname><given-names>K</given-names></name><name><surname>Zhu</surname><given-names>T</given-names></name><name><surname>Zhang</surname><given-names>F</given-names></name><name><surname>Grundmann</surname><given-names>M</given-names></name></person-group>. <comment>BlazePose: On-device real-time body pose tracking. In: <italic>CVPR 2020</italic> (2020)</comment>.</citation></ref>
<ref id="B30"><label>30.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Kanazawa</surname><given-names>A</given-names></name><name><surname>Zhang</surname><given-names>JY</given-names></name><name><surname>Felsen</surname><given-names>P</given-names></name><name><surname>Malik</surname><given-names>J</given-names></name></person-group>. <comment>Learning 3D human dynamics from video. In: <italic>2019 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)</italic>. Los Alamitos, CA, USA: IEEE Computer Society (2019). p. 5607&#x2013;16. doi:10.1109/CVPR.2019.00576</comment></citation></ref>
<ref id="B31"><label>31.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Mehta</surname><given-names>D</given-names></name><name><surname>Rhodin</surname><given-names>H</given-names></name><name><surname>Casas</surname><given-names>D</given-names></name><name><surname>Fua</surname><given-names>P</given-names></name><name><surname>Sotnychenko</surname><given-names>O</given-names></name><name><surname>Xu</surname><given-names>W</given-names></name><etal/></person-group>, <comment>Monocular 3D human pose estimation in the wild using improved CNN supervision. In: <italic>3D Vision (3DV), 2017 Fifth International Conference on</italic>. IEEE (2017). doi:10.1109/3dv.2017.00064</comment></citation></ref>
<ref id="B32"><label>32.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>von Marcard</surname><given-names>T</given-names></name><name><surname>Henschel</surname><given-names>R</given-names></name><name><surname>Black</surname><given-names>M</given-names></name><name><surname>Rosenhahn</surname><given-names>B</given-names></name><name><surname>Pons-Mol</surname><given-names>G</given-names></name></person-group>. <comment>Recovering accurate 3D human pose in the wild using IMUs and a moving camera. In: <italic>European Conference on Computer Vision (ECCV)</italic> (2018)</comment></citation></ref>
<ref id="B33"><label>33.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abdi</surname><given-names>H</given-names></name><name><surname>Williams</surname><given-names>LJ</given-names></name></person-group>. <article-title>Principal component analysis</article-title>. <source>Wiley Interdiscip Rev Comput Stat</source>. (<year>2010</year>) <volume>2</volume>:<fpage>433</fpage>&#x2013;<lpage>59</lpage>. <pub-id pub-id-type="doi">10.1002/wics.101</pub-id></citation></ref>
<ref id="B34"><label>34.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carkeet</surname><given-names>A</given-names></name><name><surname>Goh</surname><given-names>YT</given-names></name></person-group>. <article-title>Confidence and coverage for Bland&#x2013;Altman limits of agreement and their approximate confidence intervals</article-title>. <source>Stat Methods Med Res</source>. (<year>2018</year>) <volume>27</volume>:<fpage>1559</fpage>&#x2013;<lpage>74</lpage>. <pub-id pub-id-type="doi">10.1177/0962280216665419</pub-id><pub-id pub-id-type="pmid">27587594</pub-id></citation></ref>
<ref id="B35"><label>35.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>D&#x2019;Lima</surname><given-names>DD</given-names></name><name><surname>Fregly</surname><given-names>BJ</given-names></name><name><surname>Patil</surname><given-names>S</given-names></name><name><surname>Steklov</surname><given-names>N</given-names></name><name><surname>Colwell</surname><given-names>CW</given-names></name></person-group>. <article-title>Knee joint forces: Prediction, measurement, and significance</article-title>. <source>Proc Inst Mech Eng H</source>. (<year>2012</year>) <volume>226</volume>:<fpage>95</fpage>&#x2013;<lpage>102</lpage>. <pub-id pub-id-type="doi">10.1177/0954411911433372</pub-id></citation></ref>
<ref id="B36"><label>36.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kim</surname><given-names>DH</given-names></name><name><surname>Choi</surname><given-names>SS</given-names></name><name><surname>Yoon</surname><given-names>SH</given-names></name><name><surname>Lee</surname><given-names>SH</given-names></name><name><surname>Seo</surname><given-names>DK</given-names></name><name><surname>Lee</surname><given-names>IG</given-names></name><etal/></person-group>, <article-title>Ultrasound-guided genicular nerve block for knee osteoarthritis: A double-blind, randomized controlled trial of local anesthetic alone or in combination with corticosteroid</article-title>. <source>Pain Physician</source>. (<year>2018</year>) <volume>21</volume>:<fpage>41</fpage>&#x2013;<lpage>51</lpage>. <pub-id pub-id-type="doi">10.36076/ppj/2018.51</pub-id><pub-id pub-id-type="pmid">29357330</pub-id></citation></ref>
<ref id="B37"><label>37.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zawadka</surname><given-names>M</given-names></name><name><surname>Smo&#x0142;ka</surname><given-names>J</given-names></name><name><surname>Skublewska-Paszkowska</surname><given-names>M</given-names></name><name><surname>&#x0141;ukasik</surname><given-names>E</given-names></name><name><surname>Zieli&#x0144;ski</surname><given-names>G</given-names></name><name><surname>By&#x015B;</surname><given-names>A</given-names></name><etal/></person-group>, <article-title>Altered squat movement pattern in patients with chronic low back pain</article-title>. <source>Ann Agric Environ Med</source>. (<year>2020</year>) 28:158&#x2013;62. <pub-id pub-id-type="doi">10.26444/aaem/117708</pub-id></citation></ref>
<ref id="B38"><label>38.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Whitting</surname><given-names>JW</given-names></name><name><surname>Meir</surname><given-names>RA</given-names></name><name><surname>Crowley-McHattan</surname><given-names>ZJ</given-names></name><name><surname>Holding</surname><given-names>RC</given-names></name></person-group>. <article-title>Influence of footwear type on barbell back squat using 50, 70, and 90&#x0025; of one repetition maximum: A biomechanical analysis</article-title>. <source>J Strength Cond Res</source>. (<year>2016</year>) <volume>30</volume>:<fpage>1085</fpage>&#x2013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1519/jsc.0000000000001180</pub-id><pub-id pub-id-type="pmid">26349043</pub-id></citation></ref>
<ref id="B39"><label>39.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nakano</surname><given-names>N</given-names></name><name><surname>Sakura</surname><given-names>T</given-names></name><name><surname>Ueda</surname><given-names>K</given-names></name><name><surname>Omura</surname><given-names>L</given-names></name><name><surname>Kimura</surname><given-names>A</given-names></name><name><surname>Iino</surname><given-names>Y</given-names></name><etal/></person-group>, <article-title>Evaluation of 3D markerless motion capture accuracy using OpenPose with multiple video cameras</article-title>. <source>Front Sports Active Living</source>. (<year>2020</year>) <volume>2</volume>:<fpage>50</fpage>. <pub-id pub-id-type="doi">10.3389/fspor.2020.00050</pub-id></citation></ref>
<ref id="B40"><label>40.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Uchida</surname><given-names>TK</given-names></name><name><surname>Seth</surname><given-names>A</given-names></name></person-group>. <article-title>Conclusion or illusion: Quantifying uncertainty in inverse analyses from marker-based motion capture due to errors in marker registration and model scaling</article-title>. <source>Front Bioeng Biotechnol</source>. (<year>2022</year>) <volume>10</volume>:874725. <pub-id pub-id-type="doi">10.3389/fbioe.2022.874725</pub-id><pub-id pub-id-type="pmid">35694232</pub-id></citation></ref>
<ref id="B41"><label>41.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ricci</surname><given-names>L</given-names></name><name><surname>Taffoni</surname><given-names>F</given-names></name><name><surname>Formica</surname><given-names>D</given-names></name></person-group>. <article-title>On the orientation error of IMU: Investigating static and dynamic accuracy targeting human motion</article-title>. <source>PLoS One</source>. (<year>2016</year>) <volume>11</volume>:<fpage>e0161940</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0161940</pub-id><pub-id pub-id-type="pmid">27612100</pub-id></citation></ref>
<ref id="B42"><label>42.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mohr</surname><given-names>M</given-names></name><name><surname>Lorenzen</surname><given-names>K</given-names></name><name><surname>Palacios-Derflingher</surname><given-names>L</given-names></name><name><surname>Emery</surname><given-names>C</given-names></name><name><surname>Nigg</surname><given-names>BM</given-names></name></person-group>. <article-title>Reliability of the knee muscle co-contraction index during gait in young adults with and without knee injury history</article-title>. <source>J Electromyogr Kinesiol</source>. (<year>2018</year>) <volume>38</volume>:<fpage>17</fpage>&#x2013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1016/j.jelekin.2017.10.014</pub-id>. <comment>Neuromechanics of fine hand-motor tasks</comment><pub-id pub-id-type="pmid">29128741</pub-id></citation></ref>
<ref id="B43"><label>43.</label><citation citation-type="other"><person-group person-group-type="author"><name><surname>Lowe</surname><given-names>DG</given-names></name></person-group>. <comment>Organization of smooth image curves at multiple scales. Tech. Rep. January (1988). doi:10.1109/ccv.1988.590036</comment></citation></ref>
<ref id="B44"><label>44.</label><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Henriksen</surname><given-names>M</given-names></name><name><surname>Hinman</surname><given-names>R</given-names></name><name><surname>Creaby</surname><given-names>M</given-names></name><name><surname>Cicuttini</surname><given-names>F</given-names></name><name><surname>Metcalf</surname><given-names>B</given-names></name><name><surname>Bowles</surname><given-names>KA</given-names></name><etal/></person-group>, <article-title>Rotational knee load predicts cartilage loss over 12 months in knee osteoarthritis</article-title>. <source>Osteoarthritis Cartilage</source>. (<year>2012</year>) <volume>20</volume>:<fpage>S17</fpage>&#x2013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1016/j.joca.2012.02.528</pub-id></citation></ref></ref-list>
</back>
</article>