<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Archiving and Interchange DTD v2.3 20070202//EN" "archivearticle.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="methods-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Neurosci.</journal-id>
<journal-title>Frontiers in Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-453X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnins.2022.963082</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Neuroscience</subject>
<subj-group>
<subject>Methods</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>A Hierarchical Graph Learning Model for Brain Network Regression Analysis</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Tang</surname> <given-names>Haoteng</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/793781/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Guo</surname> <given-names>Lei</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1889346/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Fu</surname> <given-names>Xiyao</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1889205/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Qu</surname> <given-names>Benjamin</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1889256/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Ajilore</surname> <given-names>Olusola</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/108082/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Wang</surname> <given-names>Yalin</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/226907/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Thompson</surname> <given-names>Paul M.</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/6851/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Huang</surname> <given-names>Heng</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1096991/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Leow</surname> <given-names>Alex D.</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/111114/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Zhan</surname> <given-names>Liang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/115414/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of Electrical and Computer Engineering, University of Pittsburgh</institution>, <addr-line>Pittsburgh, PA</addr-line>, <country>United States</country></aff>
<aff id="aff2"><sup>2</sup><institution>Mission San Jose High School</institution>, <addr-line>Fremont, CA</addr-line>, <country>United States</country></aff>
<aff id="aff3"><sup>3</sup><institution>Department of Psychiatry, University of Illinois Chicago</institution>, <addr-line>Chicago, IL</addr-line>, <country>United States</country></aff>
<aff id="aff4"><sup>4</sup><institution>Department of Computer Science and Engineering, Arizona State University</institution>, <addr-line>Tempe, AZ</addr-line>, <country>United States</country></aff>
<aff id="aff5"><sup>5</sup><institution>Imaging Genetics Center, University of Southern California</institution>, <addr-line>Los Angeles, CA</addr-line>, <country>United States</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Xi Jiang, University of Electronic Science and Technology of China, China</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Li Wang, University of North Carolina at Chapel Hill, United States; Baiying Lei, Shenzhen University, China</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Liang Zhan <email>liang.zhan&#x00040;pitt.edu</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Brain Imaging Methods, a section of the journal Frontiers in Neuroscience</p></fn></author-notes>
<pub-date pub-type="epub">
<day>12</day>
<month>07</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>16</volume>
<elocation-id>963082</elocation-id>
<history>
<date date-type="received">
<day>07</day>
<month>06</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>22</day>
<month>06</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2022 Tang, Guo, Fu, Qu, Ajilore, Wang, Thompson, Huang, Leow and Zhan.</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Tang, Guo, Fu, Qu, Ajilore, Wang, Thompson, Huang, Leow and Zhan</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license> </permissions>
<abstract>
<p>Brain networks have attracted increasing attention due to the potential to better characterize brain dynamics and abnormalities in neurological and psychiatric conditions. Recent years have witnessed enormous successes in deep learning. Many AI algorithms, especially graph learning methods, have been proposed to analyze brain networks. An important issue for existing graph learning methods is that those models are not typically easy to interpret. In this study, we proposed an interpretable graph learning model for brain network regression analysis. We applied this new framework on the subjects from Human Connectome Project (HCP) for predicting multiple Adult Self-Report (ASR) scores. We also use one of the ASR scores as the example to demonstrate how to identify sex differences in the regression process using our model. In comparison with other state-of-the-art methods, our results clearly demonstrate the superiority of our new model in effectiveness, fairness, and transparency.</p></abstract>
<kwd-group>
<kwd>multimodal brain networks</kwd>
<kwd>human connectome project</kwd>
<kwd>graph learning</kwd>
<kwd>interpretable AI</kwd>
<kwd>adult self-report score</kwd>
</kwd-group>
<counts>
<fig-count count="4"/>
<table-count count="3"/>
<equation-count count="8"/>
<ref-count count="99"/>
<page-count count="12"/>
<word-count count="9268"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>1. Introduction</title>
<p>Understanding brain structural and functional changes and their relationship to other phenotypes (e.g., behavior and demographical variables or clinical outcomes) is of prime importance in the neuroscience field. One of the key research directions is to use neuroimaging data for predictive or regression analyses and identify phenotype-related imaging biomarkers. Many previous studies (Rusinek et al., <xref ref-type="bibr" rid="B60">2003</xref>; Sabuncu et al., <xref ref-type="bibr" rid="B61">2015</xref>; Seo et al., <xref ref-type="bibr" rid="B62">2015</xref>; Duffy et al., <xref ref-type="bibr" rid="B18">2018</xref>; Kim et al., <xref ref-type="bibr" rid="B34">2019</xref>) focus on predicting phenotypes using imaging features from voxels or regions-of-interest (ROIs). However, increasing evidence shows that most of the phenotypes are the outcomes of the interactions among many brain regions (Lehrer, <xref ref-type="bibr" rid="B42">2009</xref>; Van Den Heuvel et al., <xref ref-type="bibr" rid="B77">2012</xref>; Sporns, <xref ref-type="bibr" rid="B67">2013</xref>; Mattar and Bassett, <xref ref-type="bibr" rid="B50">2019</xref>); therefore, using brain networks for this prediction task attracts more and more attention. A brain network (Sporns et al., <xref ref-type="bibr" rid="B68">2004</xref>; Power et al., <xref ref-type="bibr" rid="B58">2010</xref>; Sporns, <xref ref-type="bibr" rid="B66">2011</xref>) represents a 3D brain graph model, comprising the nodes and the edges connecting the nodes. The nodes are brain ROIs and the edges can be defined using diffusion-MRI-derived fiber tracking or functional-MRI-derived correlation. Brain networks have the potential to gain system-level insights into the brain dynamics related to those phenotypes.</p>
<p>Many studies have been conducted to relate brain networks to behavioral, clinical measures or demographical variables and identify the most predictive network features (Eichele et al., <xref ref-type="bibr" rid="B19">2008</xref>; Uddin et al., <xref ref-type="bibr" rid="B76">2013</xref>; Brown et al., <xref ref-type="bibr" rid="B7">2017</xref>; Beaty et al., <xref ref-type="bibr" rid="B4">2018</xref>; Tang et al., <xref ref-type="bibr" rid="B71">2019</xref>, <xref ref-type="bibr" rid="B72">2022</xref>; Li C. et al., <xref ref-type="bibr" rid="B43">2020</xref>). However, most of these studies (Chennu et al., <xref ref-type="bibr" rid="B10">2017</xref>; Li et al., <xref ref-type="bibr" rid="B45">2017</xref>; Warren et al., <xref ref-type="bibr" rid="B86">2017</xref>; Du et al., <xref ref-type="bibr" rid="B17">2019</xref>; D&#x00301;&#x00131;az-Arteche and Rakesh, <xref ref-type="bibr" rid="B15">2020</xref>; Kuo et al., <xref ref-type="bibr" rid="B37">2020</xref>) focus on exploring correlations between the pre-defined network features (e.g., clustering coefficient, small-worldness, characteristic path length, etc.) and the measures to be predicted (such as cognitive impairment, biological variables, behavior profile, psychopathological scores, etc.). This may be sub-optimal since those derived brain network features contain less information than the original networks and may ignore important brain network attributes. Although using the entire brain network for the task can solve this issue, it will introduce another challenge in how to handle the high dimensional network data during the task. 
Obviously, the traditional linear regression method may not be a good choice and more advanced methods (Sz&#x000E9;kely et al., <xref ref-type="bibr" rid="B70">2007</xref>; Sz&#x000E9;kely and Rizzo, <xref ref-type="bibr" rid="B69">2009</xref>; Simpson et al., <xref ref-type="bibr" rid="B64">2011</xref>, <xref ref-type="bibr" rid="B65">2012</xref>; Varoquaux and Craddock, <xref ref-type="bibr" rid="B79">2013</xref>; Craddock et al., <xref ref-type="bibr" rid="B11">2015</xref>; Dai et al., <xref ref-type="bibr" rid="B13">2017</xref>; Wang et al., <xref ref-type="bibr" rid="B85">2017</xref>; Zhang et al., <xref ref-type="bibr" rid="B98">2019b</xref>; Xia et al., <xref ref-type="bibr" rid="B89">2020</xref>; Lehmann et al., <xref ref-type="bibr" rid="B41">2021</xref>; Tomlinson et al., <xref ref-type="bibr" rid="B74">2021</xref>) have been proposed for this purpose. Additionally, recent years have witnessed great success in deep learning tools, which have been widely used to discover the biological characteristics of brain network-phenotype associations (Hu et al., <xref ref-type="bibr" rid="B31">2016</xref>; Ju et al., <xref ref-type="bibr" rid="B33">2017</xref>; Mirakhorli et al., <xref ref-type="bibr" rid="B51">2020</xref>).</p>
<p>To analyze the complex network data (e.g., brain networks), deep graph learning techniques (Kipf and Welling, <xref ref-type="bibr" rid="B36">2016</xref>; Hamilton et al., <xref ref-type="bibr" rid="B28">2017</xref>; Veli&#x0010D;kovi&#x00107; et al., <xref ref-type="bibr" rid="B80">2017</xref>; Gao et al., <xref ref-type="bibr" rid="B24">2018</xref>; Zhang and Huang, <xref ref-type="bibr" rid="B96">2019</xref>; Zhang et al., <xref ref-type="bibr" rid="B97">2019a</xref>) have gained significant attention. A typical category of deep graph learning techniques are the graph neural networks (GNNs), which are proposed based on the message passing mechanism. In general, GNNs can be summarized as (1) message aggregation across nodes and (2) message transformation (e.g., non-linear transformation) as updated node features. A graph convolution operation in GNNs enables each graph node to aggregate information from its neighbor nodes. Generally, one graph convolution layer can enable the graph node to aggregate local information from one-hop neighbors (i.e., directly connected nodes), while stacked graph convolution layers may enable the graph node to aggregate higher-level information from multi-hops neighbors (Dehmamy et al., <xref ref-type="bibr" rid="B14">2019</xref>), where richer semantic information can be found. However, when stacking too many graph convolution layers, not only the effective information will be captured but also much noise will be introduced, which will break the network representation (Li et al., <xref ref-type="bibr" rid="B44">2018</xref>; Chen et al., <xref ref-type="bibr" rid="B9">2020</xref>). Therefore, an important issue for current graph learning methods is how to effectively capture the higher-level brain network features. Another issue for current graph learning techniques is that the models are not easy to interpret. 
Although many existing graph learning methods may well achieve good predictive performances for certain tasks (e.g., classification of diseases or prediction of clinical scores), it might be difficult for them to provide meaningful biological explanations or heuristic insights into the results (Wee et al., <xref ref-type="bibr" rid="B87">2019</xref>; Xuan et al., <xref ref-type="bibr" rid="B90">2019</xref>; Li Y. et al., <xref ref-type="bibr" rid="B47">2020</xref>; Wang et al., <xref ref-type="bibr" rid="B84">2021</xref>). This should be attributed to the black-box nature of the neural networks. Although it is easy to know what the neural network predicts (i.e., the output of the black-box model), it is difficult to understand how the neural networks make the decision (i.e., heuristic intermediate results inside the black box). To address these issues, a few recent studies (Cui et al., <xref ref-type="bibr" rid="B12">2021</xref>; Li et al., <xref ref-type="bibr" rid="B46">2021</xref>) have been conducted to explore interpretable discoveries from deep graph models on brain networks. However, Cui et al. (<xref ref-type="bibr" rid="B12">2021</xref>) focuses on explaining the message passing mechanism across the brain ROIs while ignoring the high-level network patterns within the brain networks. Li et al. (<xref ref-type="bibr" rid="B46">2021</xref>) tries to explain how the model generates high-level network patterns based on the graph communities. However, they only preserve the center node and discard all other nodes in the communities during the designed pooling operation.</p>
<p>In this work, we propose a new explainable graph representation learning framework and illustrate our method on a task predicting behavioral measures from multi-modal brain connectomes in young healthy adults. We hypothesize that the intrinsic higher-level graph patterns can be preserved from the graph communities in brain networks in a hierarchical manner. Based on this assumption, we design a graph community pooling module to summarize the higher-order graph patterns. These hierarchical patterns from brain networks can be used to guide the information flow during the AI model training and increase the transparency and interpretability of the model. We demonstrate this new framework by predicting several behavioral measures using the entire brain network for each gender and investigate whether there is any significant sex difference in the results. The main contributions are summarized as follows:</p>
<list list-type="bullet">
<list-item><p>We propose a new interpretable hierarchical graph representation learning framework for brain network regression analysis.</p></list-item>
<list-item><p>Compared to state-of-the-art methods, the regression results on the Human Connectome Project (HCP) dataset demonstrate the superiority of our proposed framework.</p></list-item>
<list-item><p>In order to explore the interpretability of our framework, we adopt graph saliency maps to highlight brain regions selected by the model and provide biological explanations.</p></list-item>
</list></sec>
<sec id="s2">
<title>2. Data Description</title>
<p>The brain network data used in this study was obtained from Zhang et al. (<xref ref-type="bibr" rid="B95">2020</xref>), which we summarize below. The original data was from the Human Connectome Project (HCP) 1200 Subjects Data Release (Van Essen et al., <xref ref-type="bibr" rid="B78">2013</xref>). 246 regions-of-interest (ROIs) from the Brainnetome atlas (Fan et al., <xref ref-type="bibr" rid="B21">2016</xref>) were adopted to define the resting-state functional network and the diffusion-MRI-derived structural network. The functional network was computed using the CONN toolbox (Whitfield-Gabrieli and Nieto-Castanon, <xref ref-type="bibr" rid="B88">2012</xref>) and the structural network was processed using FSL bedpostx (Behrens et al., <xref ref-type="bibr" rid="B6">2003</xref>) and probtrackx (Behrens et al., <xref ref-type="bibr" rid="B5">2007</xref>). The reconstruction pipelines for these two brain networks (Ajilore et al., <xref ref-type="bibr" rid="B2">2013</xref>; Zhan et al., <xref ref-type="bibr" rid="B93">2015</xref>) have been described in our previous publications. In order to evaluate our framework, we selected 10 Achenbach Adult Self-Report (ASR) (Achenbach and Rescorla, <xref ref-type="bibr" rid="B1">2003</xref>) measures from each subject as our prediction objectives. These 10 measures include: Anxious/Depressed Score (ANXD), Withdrawn Score (WITD), Somatic Complaints Score (SOMA), Thought Problems Score (THOT), Attention Problems Score (ATTN), Aggressive Behavior Score (AGGR), Rule Breaking Behavior Score (RULE), Intrusive Score (INTR), Internalizing Score (INTN), and Externalizing Score (EXTN). After quality control assessment of head motion and global signal changes for both scan types (diffusion MRI and resting-state fMRI) and removal of those with missing data, we included 738 young healthy subjects (mean age &#x0003D; 28.62&#x000B1;3.67, 337 males) in our study.</p>
<p>In sum, each subject has a 246 &#x000D7; 246 structural network from diffusion MRI, a 246 &#x000D7; 246 functional network from resting-state fMRI, and 10 ASR scores. <xref ref-type="table" rid="T1">Table 1</xref> summarizes the ASR statistics for each gender and details of the HCP dataset can be found in footnote 1.<xref ref-type="fn" rid="fn0001"><sup>1</sup></xref></p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Subjects&#x00027; statistics for 10 ASR scores.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>ASR score</bold></th>
<th valign="top" align="center"><bold>Male</bold></th>
<th valign="top" align="center"><bold>Female</bold></th>
<th valign="top" align="center"><bold><italic>P</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ANXD</td>
<td valign="top" align="center">54.58 &#x000B1; 6.76</td>
<td valign="top" align="center">53.91 &#x000B1; 6.09</td>
<td valign="top" align="center">1.60<sup>&#x02212;1</sup></td>
</tr>
<tr>
<td valign="top" align="left">WITD</td>
<td valign="top" align="center">54.77 &#x000B1; 6.34</td>
<td valign="top" align="center">53.02 &#x000B1; 5.32</td>
<td valign="top" align="center">5.38<sup>&#x02212;5</sup></td>
</tr>
<tr>
<td valign="top" align="left">SOMA</td>
<td valign="top" align="center">54.13 &#x000B1; 6.05</td>
<td valign="top" align="center">53.97 &#x000B1; 6.04</td>
<td valign="top" align="center">7.30<sup>&#x02212;1</sup></td>
</tr>
<tr>
<td valign="top" align="left">THOT</td>
<td valign="top" align="center">54.47 &#x000B1; 5.86</td>
<td valign="top" align="center">53.57 &#x000B1; 5.75</td>
<td valign="top" align="center">3.60<sup>&#x02212;2</sup></td>
</tr>
<tr>
<td valign="top" align="left">ATTN</td>
<td valign="top" align="center">55.89 &#x000B1; 5.54</td>
<td valign="top" align="center">54.31 &#x000B1; 5.68</td>
<td valign="top" align="center">1.55<sup>&#x02212;4</sup></td>
</tr>
<tr>
<td valign="top" align="left">AGGR</td>
<td valign="top" align="center">53.32 &#x000B1; 4.83</td>
<td valign="top" align="center">52.47 &#x000B1; 3.71</td>
<td valign="top" align="center">6.76<sup>&#x02212;3</sup></td>
</tr>
<tr>
<td valign="top" align="left">RULE</td>
<td valign="top" align="center">54.90 &#x000B1; 6.17</td>
<td valign="top" align="center">53.49 &#x000B1; 4.73</td>
<td valign="top" align="center">5.09<sup>&#x02212;4</sup></td>
</tr>
<tr>
<td valign="top" align="left">INTR</td>
<td valign="top" align="center">54.33 &#x000B1; 5.95</td>
<td valign="top" align="center">53.27 &#x000B1; 4.79</td>
<td valign="top" align="center">7.65<sup>&#x02212;3</sup></td>
</tr>
<tr>
<td valign="top" align="left">INTN</td>
<td valign="top" align="center">49.59 &#x000B1; 11.34</td>
<td valign="top" align="center">48.44 &#x000B1; 10.29</td>
<td valign="top" align="center">1.50<sup>&#x02212;1</sup></td>
</tr>
<tr>
<td valign="top" align="left">EXTN</td>
<td valign="top" align="center">50.78 &#x000B1; 8.90</td>
<td valign="top" align="center">47.59 &#x000B1; 9.04</td>
<td valign="top" align="center">1.85<sup>&#x02212;6</sup></td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>The two columns, corresponding to Male and Female groups, are reported as the mean &#x000B1; standard deviation values. The last column is the student t-test P-value to show whether there is any significant sex difference for each ASR score</italic>.</p>
</table-wrap-foot>
</table-wrap></sec>
<sec sec-type="methods" id="s3">
<title>3. Methods</title>
<p>In this section, we first provide some preliminaries for graph learning. Then, we will explain our new framework, in which we will delve into the proposed graph pooling layer which down-scales the brain network and generates the coarse representation of brain network based on the network communities. Finally, we will briefly describe the training procedure to show that our proposed framework can be trained in an end-to-end manner.</p>
<sec>
<title>3.1. Preliminaries of Graph Learning</title>
<sec>
<title>3.1.1. Graph Notation</title>
<p>We denote any attributed graph (i.e., brain network) with <italic>N</italic> nodes as <italic>G</italic> &#x0003D; (<italic>A, X</italic>). <inline-formula><mml:math id="M1"><mml:mi>A</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>N</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> is the graph adjacency matrix saving the node connections in the graph which can be defined as:</p>
<disp-formula id="E1"><label>(1)</label><mml:math id="M"><mml:mrow><mml:msub><mml:mi>A</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mtable columnalign='left'><mml:mtr columnalign='left'><mml:mtd columnalign='left'><mml:mrow><mml:mi>e</mml:mi><mml:mi>d</mml:mi><mml:mi>g</mml:mi><mml:mi>e</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>w</mml:mi><mml:mi>e</mml:mi><mml:mi>i</mml:mi><mml:mi>g</mml:mi><mml:mi>h</mml:mi><mml:mi>t</mml:mi></mml:mrow></mml:mtd><mml:mtd columnalign='left'><mml:mrow><mml:mi>i</mml:mi><mml:mi>f</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>i</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mi>n</mml:mi><mml:mi>e</mml:mi><mml:mi>c</mml:mi><mml:mi>t</mml:mi><mml:mi>s</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>t</mml:mi><mml:mi>o</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>d</mml:mi><mml:mi>e</mml:mi><mml:mtext>&#x000A0;</mml:mtext><mml:mi>j</mml:mi></mml:mrow></mml:mtd></mml:mtr><mml:mtr columnalign='left'><mml:mtd columnalign='left'><mml:mn>0</mml:mn></mml:mtd><mml:mtd columnalign='left'><mml:mrow><mml:mi>o</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mo>.</mml:mo></mml:mrow></mml:mtd></mml:mtr></mml:mtable></mml:mrow></mml:mrow></mml:mrow></mml:math></disp-formula>
<p>Particularly, in the functional brain networks, the edge weights measures the relationships between the BOLD signals of different brain regions (e.g., <italic>A</italic><sub><italic>ij</italic></sub> is the Pearson Correlation of BOLD signals between brain node <italic>i</italic> and <italic>j</italic>) (Bathelt et al., <xref ref-type="bibr" rid="B3">2013</xref>; Fischer et al., <xref ref-type="bibr" rid="B23">2014</xref>). By contrast, in the diffusion MRI-derived structural networks, the edge weights describe the connectivity of white matter tracts between brain regions. <inline-formula><mml:math id="M3"><mml:mi>X</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>d</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> is the node feature matrix, where the dimension of the feature is <italic>d</italic>. We also denote <inline-formula><mml:math id="M4"><mml:mi>Z</mml:mi><mml:mo>=</mml:mo><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>:</mml:mo></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn><mml:mo>:</mml:mo></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mo>&#x02026;</mml:mo><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>:</mml:mo></mml:mrow></mml:msub></mml:mrow><mml:mo>]</mml:mo></mml:mrow><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> as the latent feature matrix embedded by the graph convolution layers, where <italic>c</italic> is the dimension of the node latent features. 
<inline-formula><mml:math id="M5"><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>:</mml:mo></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> is the <italic>i</italic>-th row of matrix <italic>Z</italic> representing the latent feature of the <italic>i</italic>-th node. Given a set of labeled data <inline-formula><mml:math id="M6"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">D</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>G</mml:mi></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>y</mml:mi></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>G</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>y</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>G</mml:mi></mml:mrow><mml:mrow><mml:mn>3</mml:mn></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>y</mml:mi></mml:mrow><mml:mrow><mml:mn>3</mml:mn></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mo>&#x02026;</mml:mo></mml:mrow><mml:mo>}</mml:mo></mml:mrow></mml:math></inline-formula> where <inline-formula><mml:math 
id="M7"><mml:msub><mml:mrow><mml:mi>y</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:mrow><mml:mi mathvariant="-tex-caligraphic">Y</mml:mi></mml:mrow></mml:math></inline-formula> is the regression value to the corresponding graph <inline-formula><mml:math id="M8"><mml:msub><mml:mrow><mml:mi>G</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:mrow><mml:mi mathvariant="-tex-caligraphic">G</mml:mi></mml:mrow></mml:math></inline-formula>, the graph regression task is learning a mapping, <inline-formula><mml:math id="M9"><mml:mi>f</mml:mi><mml:mo>:</mml:mo><mml:mrow><mml:mi mathvariant="-tex-caligraphic">G</mml:mi></mml:mrow><mml:mo>&#x02192;</mml:mo><mml:mrow><mml:mi mathvariant="-tex-caligraphic">Y</mml:mi></mml:mrow></mml:math></inline-formula>.</p></sec>
<sec>
<title>3.1.2. Graph Neural Network</title>
<p>Graph Neural Network (GNN) is an effective message-passing architecture to embed the graph nodes as well as their local structures. In general, GNN layer can be formulated as:</p>
<disp-formula id="E2"><label>(2)</label><mml:math id="M10"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:mi>Z</mml:mi><mml:mo>=</mml:mo><mml:mi>F</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>A</mml:mi><mml:mo>,</mml:mo><mml:mi>Z</mml:mi><mml:mo>;</mml:mo><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where &#x003B8; is the trainable parameters.</p>
<p><italic>F</italic>(&#x000B7;) is the forward function of GNN layer to combine and transform the messages across the graph nodes. Different expressions of <italic>F</italic>(&#x000B7;) are proposed in the previous work such as Graph Convolution Network (GCN) (Kipf and Welling, <xref ref-type="bibr" rid="B36">2016</xref>) and Graph Attention Network (GAT) (Veli&#x0010D;kovi&#x00107; et al., <xref ref-type="bibr" rid="B80">2017</xref>). In this work, we adopt GCN to generate the node latent features. Following Kipf and Welling (<xref ref-type="bibr" rid="B36">2016</xref>), the layer of the graph neural network (i.e., Equation 2) can be instantiated as:</p>
<disp-formula id="E3"><label>(3)</label><mml:math id="M11"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:mi>Z</mml:mi><mml:mo>=</mml:mo><mml:mi>&#x003C3;</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>D</mml:mi></mml:mrow><mml:mo>&#x0007E;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>-</mml:mo><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:mfrac></mml:mrow></mml:msup><mml:mi>&#x000C3;</mml:mi><mml:msup><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>D</mml:mi></mml:mrow><mml:mo>&#x0007E;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mo>-</mml:mo><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:mfrac></mml:mrow></mml:msup><mml:mi>X</mml:mi><mml:mi>&#x003B8;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where &#x000C3; &#x0003D; <italic>A</italic>&#x0002B;<italic>I</italic>, <inline-formula><mml:math id="M12"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>D</mml:mi></mml:mrow><mml:mo>&#x0007E;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:munder class="msub"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mo>:</mml:mo><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:munder><mml:msub><mml:mrow><mml:mi>&#x000C3;</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> is the degree matrix, &#x003C3;(&#x000B7;) is a non-linear activation function (e.g., ReLU).</p></sec></sec>
<sec>
<title>3.2. Brain Network Representation Learning Framework</title>
<p>The goal of this new brain network representation learning framework is to capture community structures of brain networks in a hierarchical manner, and to generate a representation of the whole brain network based on the preserved community information. Moreover, the proposed framework should be able to utilize derived brain network representations to achieve graph-level learning tasks (e.g., graph regression). The proposed brain network representation learning framework, as shown in <xref ref-type="fig" rid="F1">Figure 1</xref>, consists of three components which are (1) nodes and local structures embedding modules, (2) community-based brain network pooling modules and (3) a task-specific prediction module. In the nodes and local structures embedding module, graph convolution layers are deployed to embed the brain network nodes and the corresponding local structures into the latent feature space. Instead of using a single graph convolution layer (i.e., 1 GCN layer), we here deploy stacked graph convolution layers (i.e., stacked GCN layers, Dehmamy et al., <xref ref-type="bibr" rid="B14">2019</xref>) which can promote each graph node to aggregate higher order information from a broader receptive field (i.e., to capture the information beyond one-hop neighborhoods to several-hops neighborhoods).</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>Diagram of the proposed hierarchical brain network learning framework, including stacked graph convolution layers, community pooling modules, and a Multilayer Perceptron (MLP) block for the regression task. The workflow details of the proposed community pooling module are presented in the red box, including: <bold>(A)</bold> Compute the center node probability (<inline-formula><mml:math id="M13"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow></mml:math></inline-formula>) and select the nodes with top-M <inline-formula><mml:math id="M14"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow></mml:math></inline-formula> scores as center nodes. <bold>(B)</bold> Assign each node to the closest community. <bold>(C)</bold> Aggregate features of community member nodes to the corresponding center node and down scale the graph based on the captured communities.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-16-963082-g0001.tif"/>
</fig>
<p>Given a brain network (i.e., <italic>G</italic> &#x0003D; (<italic>A, X</italic>)), the nodes and local structures embedding module can embed the network node features <inline-formula><mml:math id="M15"><mml:mi>X</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>d</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> with its local structures <inline-formula><mml:math id="M16"><mml:mi>A</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>N</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> into the latent space as node latent features <inline-formula><mml:math id="M17"><mml:mi>Z</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula>. The next question is how to use these node latent features to generate the high-level graph representations. The graph convolution layers focus on the node-level representation learning and only propagate information across edges of the graph in a &#x0201C;flat&#x0201D; way (Ying et al., <xref ref-type="bibr" rid="B91">2018</xref>; Tang et al., <xref ref-type="bibr" rid="B73">2021</xref>). 
Some previous studies (Lin et al., <xref ref-type="bibr" rid="B49">2013</xref>; Li et al., <xref ref-type="bibr" rid="B48">2015</xref>; Vinyals et al., <xref ref-type="bibr" rid="B82">2015</xref>; Zhang et al., <xref ref-type="bibr" rid="B94">2018</xref>) adopted global pooling, which sums, averages or concatenates all the node features as the graph-level representation, and used it for graph-level tasks (e.g., graph classification, graph similarity learning). However, these methods may ignore the hierarchical structures during the global pooling process, which renders the models ineffective in graph-level tasks. To address this issue, our proposed brain network pooling module down scales the network from <italic>N</italic> nodes to <italic>M</italic>(&#x0003C; <italic>N</italic>) nodes based on the network community, which is an important graph hierarchical structure. Specifically, the proposed brain network pooling can down scale the network latent features <inline-formula><mml:math id="M18"><mml:mi>Z</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> to <inline-formula><mml:math id="M19"><mml:mi>&#x01E90;</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula>. Details of the proposed brain network pooling module are discussed in the next subsection.</p>
<p>After the network pooling, a <bold>readout</bold> operation is adopted to summarize the whole graph representation at the current scale of the graph. Assuming that we obtain the network latent feature matrix <inline-formula><mml:math id="M20"><mml:mi>&#x01E90;</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> from the network pooling module, the readout operation generates the whole graph representation <inline-formula><mml:math id="M21"><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> by a linear layer with an activation function:</p>
<disp-formula id="E4"><label>(4)</label><mml:math id="M22"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>&#x003C3;</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>W</mml:mi><mml:mi>&#x01E90;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where <inline-formula><mml:math id="M23"><mml:mi>W</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mn>1</mml:mn><mml:mo>&#x000D7;</mml:mo><mml:mi>M</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> denotes the trainable parameters within the linear layer and &#x003C3;(&#x000B7;) is an activation function (i.e., ReLU).</p>
<p>In the task-specific prediction module, we first fuse (e.g., concatenate, sum, average, etc.) all the graph representations <italic>Z</italic><sub><italic>G</italic></sub> obtained at different scales of graphs as the hierarchical graph representation for the further graph-level prediction (i.e., graph regression in this work). Then, a Multilayer Perceptron (MLP) is deployed to utilize the hierarchical graph representation for the graph regression task.</p></sec>
<sec>
<title>3.3. Brain Network Pooling</title>
<p>As mentioned before, the brain network pooling module down scales the node latent features <inline-formula><mml:math id="M24"><mml:mi>Z</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> to the <inline-formula><mml:math id="M25"><mml:mi>&#x01E90;</mml:mi><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mi>c</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> based on the network community structures. To achieve this, two basic steps are involved in the brain network pooling module including network community partition and community representation. We will discuss these two steps in sequence.</p>
<sec>
<title>3.3.1. Network Community Partition</title>
<p>To partition the network nodes and generate the network community, the pooling module will first identify the community center nodes and then assign other nodes to the nearest community. Inspired by the density-based partition methods (Ester et al., <xref ref-type="bibr" rid="B20">1996</xref>; Heuvel van den and Sporns, <xref ref-type="bibr" rid="B30">2013</xref>), in which community center nodes are always densely encircled by a group of nodes with a high probability, we compute the feature distance (i.e., the Euclidean distance of feature vectors) as a metric to approximate the probability that measures the possibility for a node to be a center node. Specifically, a node with smaller feature distances to all other nodes is more likely to be a community center. Based on node feature vectors, we construct the probability vector, <inline-formula><mml:math id="M26"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula>, to measure the possibility of each node being a community center node, where <inline-formula><mml:math id="M27"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow></mml:math></inline-formula> is formulated as:</p>
<disp-formula id="E5"><label>(5)</label><mml:math id="M28"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:mi>s</mml:mi><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mi>t</mml:mi><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>x</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>-</mml:mo><mml:mi>n</mml:mi><mml:mi>o</mml:mi><mml:mi>r</mml:mi><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>l</mml:mi><mml:mi>i</mml:mi><mml:mi>z</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:mo>[</mml:mo><mml:mrow><mml:mstyle displaystyle="true"><mml:munderover accentunder="false" accent="false"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:mi>N</mml:mi></mml:mrow></mml:munderover></mml:mstyle><mml:msub><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo>]</mml:mo></mml:mrow></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where <italic>S</italic> (i.e., <italic>S</italic><sub><italic>i, j</italic></sub> &#x0003D; ||<italic>Z</italic><sub><italic>i</italic></sub>&#x02212;<italic>Z</italic><sub><italic>j</italic></sub>||<sub><italic>L</italic><sub>1</sub></sub>) is the feature distance matrix. Finally, we select <italic>M</italic> nodes with Top-M <inline-formula><mml:math id="M29"><mml:mrow><mml:mi mathvariant="-tex-caligraphic">P</mml:mi></mml:mrow></mml:math></inline-formula>-values as <italic>M</italic> community center nodes.</p></sec>
<sec>
<title>3.3.2. Community Representation</title>
<p>Once we identify <italic>M</italic> community center nodes, we assign the other graph nodes to the nearest community. We denote &#x003A9; &#x0003D; {&#x003A9;<sub>1</sub>, &#x003A9;<sub>2</sub>, &#x02026;, &#x003A9;<sub><italic>M</italic></sub>} as the set of all <italic>M</italic> communities. Then the representation of the <italic>i</italic>-th community (i.e., &#x01E90;<sub><italic>i</italic></sub>) can be computed by:</p>
<disp-formula id="E6"><label>(6)</label><mml:math id="M30"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>&#x01E90;</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>c</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:msub><mml:mo>&#x0002B;</mml:mo><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:msub><mml:mrow><mml:mi>&#x003A9;</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:munder></mml:mstyle><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:msub><mml:mo>&#x000B7;</mml:mo><mml:mfrac><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>S</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi><mml:mo>,</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mfrac><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where <italic>Z</italic><sub><italic>c</italic><sub><italic>i</italic></sub></sub> is the latent feature of the center node of <italic>i</italic>-th community. <italic>v</italic><sub><italic>j</italic></sub> are the community member nodes in the corresponding community.</p></sec></sec>
<sec>
<title>3.4. Supervision Manner for Regression Task</title>
<p>As aforementioned, we fuse all graph representations <italic>Z</italic><sub><italic>G</italic></sub> obtained from different graph scales as the final hierarchical graph representation <inline-formula><mml:math id="M31"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mo>&#x00304;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula>. Then, an MLP takes <inline-formula><mml:math id="M32"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mo>&#x00304;</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>G</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> as input to generate the regression prediction value &#x00177;. We optimize the Mean squared error (MSE) loss (i.e., &#x02113;<sub><italic>MSE</italic></sub>) to minimize the difference between the ground-truth <italic>y</italic> and the prediction &#x00177;. Meanwhile, to make the feature of community members closer to the corresponding community center node, we minimize:</p>
<disp-formula id="E7"><label>(7)</label><mml:math id="M33"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>&#x02113;</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>m</mml:mi><mml:mi>u</mml:mi><mml:mi>n</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>&#x003A9;</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:mi>&#x003A9;</mml:mi></mml:mrow></mml:munder></mml:mstyle><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:msub><mml:mrow><mml:mi>&#x003A9;</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:munder></mml:mstyle><mml:msub><mml:mrow><mml:mi>&#x02113;</mml:mi></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mi>E</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>v</mml:mi></mml:mrow><mml:mrow><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mrow><mml:msub><mml:mrow><mml:mi>c</mml:mi></mml:mrow><mml:mrow><mml:mi>i</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>The total loss function can be formulated as follows:</p>
<disp-formula id="E8"><label>(8)</label><mml:math id="M34"><mml:mtable class="eqnarray" columnalign="right center left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mrow><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>g</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow><mml:mrow><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mrow><mml:mi>&#x02113;</mml:mi></mml:mrow><mml:mrow><mml:mi>M</mml:mi><mml:mi>S</mml:mi><mml:mi>E</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x00177;</mml:mi><mml:mo>,</mml:mo><mml:mi>y</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x0002B;</mml:mo><mml:msub><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msub><mml:msub><mml:mrow><mml:mi>&#x02113;</mml:mi></mml:mrow><mml:mrow><mml:mi>c</mml:mi><mml:mi>o</mml:mi><mml:mi>m</mml:mi><mml:mi>m</mml:mi><mml:mi>u</mml:mi><mml:mi>n</mml:mi><mml:mi>i</mml:mi><mml:mi>t</mml:mi><mml:mi>y</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math></disp-formula>
<p>where the &#x003B7;<sub>1</sub> and &#x003B7;<sub>2</sub> are the loss weights. We train the proposed brain network learning framework by minimizing this regression loss and the whole training procedure is therefore in an end-to-end manner.</p></sec></sec>
<sec id="s4">
<title>4. Results and Discussions</title>
<sec>
<title>4.1. Experiment Design and Evaluation</title>
<p>We will apply the proposed framework to predict ASR scores. The prediction performance will be evaluated using Mean Absolute Error (MAE). Since the community pooling module in our framework will select a group of nodes or brain regions, we can identify which brain regions (or brain network nodes) are directly linked to the prediction objects (i.e., ASR score in our study) from the last pooling module. Please note that this &#x0201C;link&#x0201D; does not imply a direct correlation, since the relationship captured by our framework is non-linear by nature. We name these nodes as effecting nodes, and the last community pooling layer in our framework will generate a group of &#x0201C;effecting&#x0201D; nodes. Due to individual differences, the effecting nodes for each subject are not exactly the same. Then we count how many times each node is selected as the effecting node during testing and normalize this number by the total number of testing subjects in each group. The resulting number will be treated as the frequency of this node being the effecting node. As a result, we can get the nodal frequency distribution for each group (male or female). Then the normalized mutual information (NMI) is used to quantify the group difference between male and female, and we adopt a permutation approach to evaluate the significance of the group difference.</p></sec>
<sec>
<title>4.2. Experiment Setting</title>
<p>For each prediction task, we randomly split the entire dataset into five disjoint sets for 5-fold cross-validation. All the prediction accuracies are calculated as the mean &#x000B1; standard deviation values obtained from these 5 folds. We utilize the diffusion MRI-derived brain structural networks as the adjacency matrix input of our framework. We treat each row in the resting-state functional network as the feature for each node, so the initial nodal feature dimension is 246. We also consider using Principal Component Analysis (PCA) to reduce the nodal feature dimension. During the training stage, we optimize the parameters in the framework using the Adam optimizer (Kingma and Ba, <xref ref-type="bibr" rid="B35">2015</xref>) with a batch size of 256. The initial learning rate is set to 0.001 and decayed by <inline-formula><mml:math id="M35"><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>1</mml:mn><mml:mo>-</mml:mo><mml:mfrac><mml:mrow><mml:mi>c</mml:mi><mml:mi>u</mml:mi><mml:mi>r</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:mi>t</mml:mi><mml:mstyle class="text"><mml:mtext>_</mml:mtext></mml:mstyle><mml:mi>e</mml:mi><mml:mi>p</mml:mi><mml:mi>o</mml:mi><mml:mi>c</mml:mi><mml:mi>h</mml:mi></mml:mrow><mml:mrow><mml:mi>m</mml:mi><mml:mi>a</mml:mi><mml:mi>x</mml:mi><mml:mstyle class="text"><mml:mtext>_</mml:mtext></mml:mstyle><mml:mi>e</mml:mi><mml:mi>p</mml:mi><mml:mi>o</mml:mi><mml:mi>c</mml:mi><mml:mi>h</mml:mi></mml:mrow></mml:mfrac></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mn>0</mml:mn><mml:mo>.</mml:mo><mml:mn>9</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula>. We also regularize the framework training with an <italic>L</italic><sub>2</sub> weight decay of 1<italic>e</italic><sup>&#x02212;5</sup>. 
Following the previous studies (Shchur et al., <xref ref-type="bibr" rid="B63">2018</xref>; Lee et al., <xref ref-type="bibr" rid="B38">2019</xref>), we adopt an early stopping criterion that terminates training if the validation loss does not improve for 20 epochs, with a maximum of 500 epochs. We implement all experiments based on PyTorch (Paszke et al., <xref ref-type="bibr" rid="B57">2019</xref>) and the torch-geometric graph learning library (Fey and Lenssen, <xref ref-type="bibr" rid="B22">2019</xref>). All the experiments are deployed on one NVIDIA TITAN RTX GPU.</p></sec>
<sec>
<title>4.3. Prediction Performance</title>
<p>In this section, we put all subjects (male and female) into one group and apply our method to predict ASR scores. We compare the prediction performance of our framework with 7 baseline methods to show the superiority of our framework. Two dimension reduction methods [i.e., PCA and Spectral Clustering (Ng et al., <xref ref-type="bibr" rid="B54">2002</xref>) with linear regression] and five graph neural network (GNN) based models [i.e., Stacked GCN with Global-POOL, SAG-POOL (Lee et al., <xref ref-type="bibr" rid="B38">2019</xref>), DIFFPOOL (Ying et al., <xref ref-type="bibr" rid="B91">2018</xref>), HGP-SL (Zhang et al., <xref ref-type="bibr" rid="B99">2019c</xref>) and StructPOOL (Yuan and Ji, <xref ref-type="bibr" rid="B92">2020</xref>)] with different pooling layers are set as our compared baselines. The GNN based models can co-embed the brain structural networks (i.e., as adjacency matrices) and brain functional networks (i.e., as node feature matrices) into the latent space, however, two dimension reduction methods can only analyze one type of brain networks. To make a fair comparison, we only utilize brain structural networks to present the regression performance here in <xref ref-type="table" rid="T2">Table 2</xref>. Particularly, we conduct two dimension reduction methods on the brain structural networks to reduce the network dimension. Then, the linear regression is adopted on the dimension reduced networks for the regression task. 
Meanwhile, for the 5 GNN-based baseline models as well as ours, we initialize the node feature matrix by using an all-ones vector (i.e., <inline-formula><mml:math id="M36"><mml:mover accent="true"><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mo>&#x02192;</mml:mo></mml:mover><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mrow><mml:mi mathvariant="-tex-caligraphic">R</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mo>&#x000D7;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup></mml:math></inline-formula>) and only utilize the brain structural networks as the adjacency matrices. For the 5 hierarchical graph pooling models (i.e., SAG-POOL, DIFFPOOL, HGP-SL, StructPOOL and ours), we deployed 3 hierarchical graph pooling modules. <xref ref-type="table" rid="T2">Table 2</xref> shows that our proposed framework achieves the best performance with the lowest regression Mean Absolute Error (MAE) compared with all other methods. Meanwhile, the GNN-based methods are generally superior to the dimension reduction ones. This may result from the fact that GNN-based methods can better extract the network local and global topological structures which are important to represent the brain networks. Moreover, the hierarchical graph pooling models perform better than the global pooling method, which may be explained by the fact that our hierarchical pooling method can not only extract the graph local structures as the low-level features but also preserve these low-level features into the high-level space in a hierarchical manner, while the global pooling method can only extract the graph low-level features and combine these features in a naive way (e.g., by concatenating, averaging, etc.).</p>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>Regression Mean Absolute Error (MAE) with corresponding standard deviations under five-fold cross-validation on 10 ASR scores.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th/>
<th valign="top" align="center"><bold>PCA&#x0002B;LR</bold></th>
<th valign="top" align="center"><bold>SC&#x0002B;LR</bold></th>
<th valign="top" align="center"><bold>GCN-GlobalPOOL</bold></th>
<th valign="top" align="center"><bold>SAG-POOL</bold></th>
<th valign="top" align="center"><bold>DIFFPOOL</bold></th>
<th valign="top" align="center"><bold>HGP-SL</bold></th>
<th valign="top" align="center"><bold>StructPOOL</bold></th>
<th valign="top" align="center"><bold>Ours</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ANXD</td>
<td valign="top" align="center">3.66 &#x000B1; 0.0083</td>
<td valign="top" align="center">3.52 &#x000B1; 0.0004</td>
<td valign="top" align="center">3.01 &#x000B1; 0.0013</td>
<td valign="top" align="center">2.26 &#x000B1; 0.0071</td>
<td valign="top" align="center">2.01 &#x000B1; 0.0021</td>
<td valign="top" align="center">1.78 &#x000B1; 0.0062</td>
<td valign="top" align="center">2.11 &#x000B1; 0.0012</td>
<td valign="top" align="center" style="color:#ee1c23">1.49 &#x000B1; 0.0033</td>
</tr>
<tr>
<td valign="top" align="left">WITD</td>
<td valign="top" align="center">3.07 &#x000B1; 0.0005</td>
<td valign="top" align="center">3.19 &#x000B1; 0.0083</td>
<td valign="top" align="center">2.81 &#x000B1; 0.0055</td>
<td valign="top" align="center">1.87 &#x000B1; 0.0052</td>
<td valign="top" align="center">1.91 &#x000B1; 0.0008</td>
<td valign="top" align="center">1.69 &#x000B1; 0.0049</td>
<td valign="top" align="center">1.94 &#x000B1; 0.0036</td>
<td valign="top" align="center" style="color:#ee1c23">1.18 &#x000B1; 0.0011</td>
</tr>
<tr>
<td valign="top" align="left">SOMA</td>
<td valign="top" align="center">2.96 &#x000B1; 0.0091</td>
<td valign="top" align="center">3.03 &#x000B1; 0.0019</td>
<td valign="top" align="center">3.11 &#x000B1; 0.0075</td>
<td valign="top" align="center">1.71 &#x000B1; 0.0008</td>
<td valign="top" align="center">1.83 &#x000B1; 0.0041</td>
<td valign="top" align="center">1.88 &#x000B1; 0.0027</td>
<td valign="top" align="center">1.63 &#x000B1; 0.0007</td>
<td valign="top" align="center" style="color:#ee1c23">1.16 &#x000B1; 0.0021</td>
</tr>
<tr>
<td valign="top" align="left">THOT</td>
<td valign="top" align="center">3.51 &#x000B1; 0.0010</td>
<td valign="top" align="center">3.24 &#x000B1; 0.0022</td>
<td valign="top" align="center">3.09 &#x000B1; 0.0004</td>
<td valign="top" align="center">2.19 &#x000B1; 0.0037</td>
<td valign="top" align="center">2.07 &#x000B1; 0.0027</td>
<td valign="top" align="center">2.04 &#x000B1; 0.0079</td>
<td valign="top" align="center">2.13 &#x000B1; 0.0020</td>
<td valign="top" align="center" style="color:#ee1c23">1.31 &#x000B1; 0.0006</td>
</tr>
<tr>
<td valign="top" align="left">ATTN</td>
<td valign="top" align="center">3.87 &#x000B1; 0.0056</td>
<td valign="top" align="center">3.60 &#x000B1; 0.0008</td>
<td valign="top" align="center">2.94 &#x000B1; 0.0016</td>
<td valign="top" align="center">2.78 &#x000B1; 0.0024</td>
<td valign="top" align="center">2.44 &#x000B1; 0.0053</td>
<td valign="top" align="center">2.33 &#x000B1; 0.0062</td>
<td valign="top" align="center">2.04 &#x000B1; 0.0014</td>
<td valign="top" align="center" style="color:#ee1c23">1.84 &#x000B1; 0.0041</td>
</tr>
<tr>
<td valign="top" align="left">AGGR</td>
<td valign="top" align="center">2.41 &#x000B1; 0.0065</td>
<td valign="top" align="center">2.21 &#x000B1; 0.0072</td>
<td valign="top" align="center">2.37 &#x000B1; 0.0022</td>
<td valign="top" align="center">1.94 &#x000B1; 0.0080</td>
<td valign="top" align="center">1.61 &#x000B1; 0.0034</td>
<td valign="top" align="center">1.59 &#x000B1; 0.0050</td>
<td valign="top" align="center">1.61 &#x000B1; 0.0033</td>
<td valign="top" align="center" style="color:#ee1c23">1.16 &#x000B1; 0.0091</td>
</tr>
<tr>
<td valign="top" align="left">RULE</td>
<td valign="top" align="center">2.99 &#x000B1; 0.0044</td>
<td valign="top" align="center">2.87 &#x000B1; 0.0084</td>
<td valign="top" align="center">2.80 &#x000B1; 0.0009</td>
<td valign="top" align="center">1.85 &#x000B1; 0.0059</td>
<td valign="top" align="center">2.00 &#x000B1; 0.0020</td>
<td valign="top" align="center">1.74 &#x000B1; 0.0040</td>
<td valign="top" align="center">1.89 &#x000B1; 0.0019</td>
<td valign="top" align="center" style="color:#ee1c23">1.49 &#x000B1; 0.0008</td>
</tr>
<tr>
<td valign="top" align="left">INTR</td>
<td valign="top" align="center">3.04 &#x000B1; 0.0009</td>
<td valign="top" align="center">3.20 &#x000B1; 0.0031</td>
<td valign="top" align="center">2.76 &#x000B1; 0.0053</td>
<td valign="top" align="center">2.06 &#x000B1; 0.0064</td>
<td valign="top" align="center">1.98 &#x000B1; 0.0037</td>
<td valign="top" align="center">1.69 &#x000B1; 0.0009</td>
<td valign="top" align="center">1.59 &#x000B1; 0.0020</td>
<td valign="top" align="center" style="color:#ee1c23">1.21 &#x000B1; 0.0037</td>
</tr>
<tr>
<td valign="top" align="left">INTN</td>
<td valign="top" align="center">2.87 &#x000B1; 0.0062</td>
<td valign="top" align="center">3.01 &#x000B1; 0.0039</td>
<td valign="top" align="center">2.61 &#x000B1; 0.0046</td>
<td valign="top" align="center">2.17 &#x000B1; 0.0077</td>
<td valign="top" align="center">2.14 &#x000B1; 0.0040</td>
<td valign="top" align="center">2.15 &#x000B1; 0.0025</td>
<td valign="top" align="center">2.04 &#x000B1; 0.0054</td>
<td valign="top" align="center" style="color:#ee1c23">1.27 &#x000B1; 0.0020</td>
</tr>
<tr>
<td valign="top" align="left">EXTN</td>
<td valign="top" align="center">3.70 &#x000B1; 0.0017</td>
<td valign="top" align="center">3.54 &#x000B1; 0.0055</td>
<td valign="top" align="center">3.45 &#x000B1; 0.0071</td>
<td valign="top" align="center">1.98 &#x000B1; 0.0034</td>
<td valign="top" align="center">2.22 &#x000B1; 0.0005</td>
<td valign="top" align="center">2.07 &#x000B1; 0.0037</td>
<td valign="top" align="center">1.98 &#x000B1; 0.0018</td>
<td valign="top" align="center" style="color:#ee1c23">1.58 &#x000B1; 0.0012</td>
</tr>
<tr>
<td valign="top" align="left">Overall</td>
<td valign="top" align="center">4.62 &#x000B1; 0.0038</td>
<td valign="top" align="center">4.37 &#x000B1; 0.0018</td>
<td valign="top" align="center">4.02 &#x000B1; 0.0045</td>
<td valign="top" align="center">3.62 &#x000B1; 0.0029</td>
<td valign="top" align="center">3.39 &#x000B1; 0.0088</td>
<td valign="top" align="center">3.05 &#x000B1; 0.0011</td>
<td valign="top" align="center">3.24 &#x000B1; 0.0013</td>
<td valign="top" align="center" style="color:#ee1c23">2.93 &#x000B1; 0.0084</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>Overall denotes the task of jointly predicting all the 10 ASR scores. LR and SC represent linear regression and spectral clustering respectively. The values in red show the best results</italic>.</p>
</table-wrap-foot>
</table-wrap></sec>
<sec>
<title>4.4. Loss Weights Analysis</title>
<p>We search the loss weights of &#x003B7;<sub>1</sub> and &#x003B7;<sub>2</sub> in the ranges of [0.1, 0.5, 1] and [0.01, 0.05, 0.1], respectively (see <xref ref-type="fig" rid="F2">Figure 2</xref>) for the Overall ASR regression. The best loss weights are determined as &#x003B7;<sub>1</sub> &#x0003D; 0.5 and &#x003B7;<sub>2</sub> &#x0003D; 0.01. <xref ref-type="fig" rid="F2">Figure 2</xref> indicates that the performance of our framework is relatively consistent under different loss weights. We use the same loss weights setting for each single ASR prediction, although the optimal loss weights may be slightly different for different predictions.</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>Loss weights analysis for the Overall ASR regression task. The optimal values of &#x003B7;<sub>1</sub> and &#x003B7;<sub>2</sub> are 0.5 and 0.01, respectively, where the MAE of overall regression achieves as 2.93.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-16-963082-g0002.tif"/>
</fig></sec>
<sec>
<title>4.5. Impact of Community Pooling Modules on the Prediction Performance</title>
<p>In this section, we evaluate how the number of Community Pooling modules affects the prediction performance on 10 ASR scores. We deployed different numbers of pooling modules (i.e., from 1 to 5) and set the pooling ratio in each pooling module as 0.5 (i.e., only 50% of the nodes will be preserved after each pooling module). The MAE values of the ASR scores obtained by the proposed framework with different numbers of pooling modules are shown in <xref ref-type="fig" rid="F3">Figure 3A</xref>. <xref ref-type="fig" rid="F3">Figure 3A</xref> shows that the regression performance obtained by our proposed framework is consistent among different ASR scores. In general, with the increasing number of pooling modules, the MAE values first decline and then rise, with the minimum MAE value achieved when 3 pooling modules are deployed. The possible explanation is as follows: when the number of pooling modules is insufficient (e.g., 1 or 2), the high-level features related to the prediction object have not been sufficiently extracted; while when too many pooling modules (e.g., 4 or 5) are deployed, the extracted features may be too &#x0201C;coarse&#x0201D;, where the key discriminative information has been mosaicked.</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p>Ablation study. <bold>(A)</bold> Regression MAE under different number of pooling modules. The x-axis is 1 to 5, representing the number of community pooling modules and y-axis is the corresponding MAE. <bold>(B)</bold> Regression MAE obtained by the proposed framework when using different number of node features. The x-axis ranges from 0 to 246, representing different number of nodal features and y-axis is the corresponding MAE.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-16-963082-g0003.tif"/>
</fig></sec>
<sec>
<title>4.6. Impact of Nodal Features on the Prediction Performance</title>
<p>Firstly, the number of the pooling modules is fixed as 3 for all experiments in this section. Then, we predict the ASR scores without using any nodal features and treat the feature dimension as zero. This is implemented by setting the node feature matrix as <inline-formula><mml:math id="M37"><mml:mover accent="true"><mml:mrow><mml:mn>1</mml:mn></mml:mrow><mml:mo>&#x02192;</mml:mo></mml:mover></mml:math></inline-formula>. After that, we use the PCA algorithm to extract different numbers of features (from 1 to 240) and use them as the nodal features for the predictions. Lastly, we directly apply the functional network as the nodal feature matrix for the same tasks; in this situation, the feature dimension is 246. Therefore, we can compare how the number of nodal features affects the prediction performance, and our results are summarized in <xref ref-type="fig" rid="F3">Figure 3B</xref>.</p>
<p>There are two main findings in <xref ref-type="fig" rid="F3">Figure 3B</xref>. Firstly, the proposed framework can generally achieve better prediction performance by using the functional network as the node feature matrix. Secondly, we expected that using the principal components of the functional networks as the nodal features could further improve the regression or prediction performance. Among feature dimensions ranging from 1 to 240, the best result (i.e., the lowest MAE) is achieved at 10; in other words, using the top 10 PCs to form the feature matrix can achieve the best performance when compared with other dimension options. Moreover, although the performance obtained with 10 PCs is close to that obtained by using full functional networks (dimension = 246), using the full functional network as the feature matrix (dimension = 246) generally has a better prediction performance than using PCs as the feature input, which indicates that the topological structures in the full functional networks may not be well preserved by the PCA processing. There may be better choices for the nodal features or dimension reduction techniques, which will be considered in our future research.</p>
<sec>
<title>4.7. Biological Application and Algorithm Fairness</title>
<p>In this section, we will demonstrate how to apply this new framework to identify sex differences. Here, sex refers to biological sex, as the available data do not permit us to disentangle the influence of socially and culturally defined gender from biological sex effects.</p>
<p>We firstly apply our framework to predict each of the ASR scores for each sex. <xref ref-type="table" rid="T3">Table 3</xref> summarizes the estimation errors (mean &#x000B1; standard deviation) for each sex (columns 1 and 2 for male and female, respectively). Column 3 in <xref ref-type="table" rid="T3">Table 3</xref> shows the Student&#x00027;s <italic>t</italic>-test <italic>P</italic>-values for evaluating whether there is any significant difference in the estimation errors between sexes. None of these are significant; in other words, these results demonstrate the fairness of our framework in terms of the variable &#x0201C;sex&#x0201D;.</p>
<table-wrap position="float" id="T3">
<label>Table 3</label>
<caption><p>Estimation errors for predicting each ASR score for each sex.</p></caption>
<table frame="hsides" rules="groups">
<thead><tr>
<th valign="top" align="left"><bold>ASR score</bold></th>
<th valign="top" align="center"><bold>Male</bold></th>
<th valign="top" align="center"><bold>Female</bold></th>
<th valign="top" align="center"><bold><italic>P</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">ANXD</td>
<td valign="top" align="center">1.74 &#x000B1; 0.03</td>
<td valign="top" align="center">1.73 &#x000B1; 0.02</td>
<td valign="top" align="center">0.66</td>
</tr>
<tr>
<td valign="top" align="left">WITD</td>
<td valign="top" align="center">1.24 &#x000B1; 0.02</td>
<td valign="top" align="center">1.24 &#x000B1; 0.03</td>
<td valign="top" align="center">0.82</td>
</tr>
<tr>
<td valign="top" align="left">SOMA</td>
<td valign="top" align="center">1.25 &#x000B1; 0.02</td>
<td valign="top" align="center">1.27 &#x000B1; 0.06</td>
<td valign="top" align="center">0.44</td>
</tr>
<tr>
<td valign="top" align="left">THOT</td>
<td valign="top" align="center">1.45 &#x000B1; 0.05</td>
<td valign="top" align="center">1.40 &#x000B1; 0.04</td>
<td valign="top" align="center">0.10</td>
</tr>
<tr>
<td valign="top" align="left">ATTN</td>
<td valign="top" align="center">1.96 &#x000B1; 0.06</td>
<td valign="top" align="center">1.95 &#x000B1; 0.03</td>
<td valign="top" align="center">0.78</td>
</tr>
<tr>
<td valign="top" align="left">AGGR</td>
<td valign="top" align="center">1.26 &#x000B1; 0.04</td>
<td valign="top" align="center">1.24 &#x000B1; 0.03</td>
<td valign="top" align="center">0.31</td>
</tr>
<tr>
<td valign="top" align="left">RULE</td>
<td valign="top" align="center">1.62 &#x000B1; 0.07</td>
<td valign="top" align="center">1.55 &#x000B1; 0.08</td>
<td valign="top" align="center">0.16</td>
</tr>
<tr>
<td valign="top" align="left">INTR</td>
<td valign="top" align="center">1.37 &#x000B1; 0.05</td>
<td valign="top" align="center">1.35 &#x000B1; 0.05</td>
<td valign="top" align="center">0.47</td>
</tr>
<tr>
<td valign="top" align="left">INTN</td>
<td valign="top" align="center">1.37 &#x000B1; 0.08</td>
<td valign="top" align="center">1.32 &#x000B1; 0.08</td>
<td valign="top" align="center">0.38</td>
</tr>
<tr>
<td valign="top" align="left">EXTN</td>
<td valign="top" align="center">1.64 &#x000B1; 0.09</td>
<td valign="top" align="center">1.71 &#x000B1; 0.18</td>
<td valign="top" align="center">0.43</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p><italic>The results are reported in the format of mean &#x000B1; standard deviation. The last column is the Student t-test P-value to show whether there is any significant difference in the estimation errors between male and female. These results indicate that our new framework is fair for the variable &#x0201C;sex&#x0201D;</italic>.</p>
</table-wrap-foot>
</table-wrap>
<p>Next, we adopt the permutation approach to evaluate whether there are significant sex differences in the &#x0201C;effecting&#x0201D; node distributions for each ASR score (please refer to Section 4.1 for technical details). We randomly shuffle the subjects between male and female groups and conduct 100 permutations. All permutation tests are conducted using the computation resources in the Pittsburgh Supercomputing Center (PSC) (Towns et al., <xref ref-type="bibr" rid="B75">2014</xref>; Nystrom et al., <xref ref-type="bibr" rid="B55">2015</xref>). Our permutation results show that there are significant sex differences (<italic>p</italic> &#x0003C; 0.01) in the effecting node distributions for 7 ASR variables, the exceptions being ANXD, SOMA and INTN, which is consistent with the conclusions from <xref ref-type="table" rid="T1">Table 1</xref>. Here we choose ATTN as an example to show the sex differences in the effecting nodal distribution. The attention problem score (ATTN) (Achenbach and Rescorla, <xref ref-type="bibr" rid="B1">2003</xref>) indicates the tendency to be easily distracted and unable to concentrate more than momentarily. <xref ref-type="fig" rid="F4">Figure 4</xref> shows the effecting node distributions for male and female, where the hot color indicates a stronger involvement of that ROI in this psychiatric process (or ATTN) and the cool color indicates the opposite. Our results show there are multiple brain regions (including the Left Paracentral lobule, Right Posterior cingulate, Left dorsomedial prefrontal cortex, Right Precuneus, and Left Premotor, highlighted using black circles in <xref ref-type="fig" rid="F4">Figure 4</xref>) showing significantly different involvements in this psychiatric process between sexes.</p>
<fig id="F4" position="float">
<label>Figure 4</label>
<caption><p>Sex difference identified for ATTN. The color indicates the region&#x00027;s involvement in the ATTN process: the hotter color indicates a stronger involvement and the cooler color indicates the inverse. Permutation tests have been adopted to confirm the significance of this sex difference (<italic>p</italic> &#x0003C; 0.01). The main sex differences are in several regions, which are highlighted using a black circle. These regions include the Left Paracentral lobule, Right Posterior cingulate, Left dorsomedial prefrontal cortex, Right Precuneus, and Left Premotor.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fnins-16-963082-g0004.tif"/>
</fig>
<p>Previous studies reported that the paracentral lobule is activated in covert shifts of attention (Grosbras et al., <xref ref-type="bibr" rid="B25">2005</xref>) and auditory attention shifting (Huang et al., <xref ref-type="bibr" rid="B32">2012</xref>). Moreover, Dickstein et al. (<xref ref-type="bibr" rid="B16">2006</xref>) reported that the right paracentral lobule had a greater probability of activation in patients with Attention-deficit/hyperactivity disorder (ADHD) than in controls, while our results show that part of the sex differences for healthy controls is in the left paracentral lobule, which deserves further investigation in the future. The posterior cingulate cortex (PCC) is a central node of the default mode network (DMN) and much evidence suggests that the PCC plays a direct role in attentionally demanding tasks (Gusnard and Raichle, <xref ref-type="bibr" rid="B26">2001</xref>; Vogt and Laureys, <xref ref-type="bibr" rid="B83">2005</xref>; Hampson et al., <xref ref-type="bibr" rid="B29">2006</xref>; Hahn et al., <xref ref-type="bibr" rid="B27">2007</xref>; Leech et al., <xref ref-type="bibr" rid="B39">2011</xref>; Leech and Sharp, <xref ref-type="bibr" rid="B40">2014</xref>). The dorsomedial prefrontal cortex (dmPFC) receives afferent input from sensory and parietal regions of the cortex, which presumably enables the dmPFC to respond to situations that require immediate attention and respond with appropriate actions (Narayanan and Laubach, <xref ref-type="bibr" rid="B53">2006</xref>; Venkatraman et al., <xref ref-type="bibr" rid="B81">2009</xref>; Park et al., <xref ref-type="bibr" rid="B56">2016</xref>). 
Additionally, the Precuneus has been reported to be highly involved in attention shifts (Cavanna and Trimble, <xref ref-type="bibr" rid="B8">2006</xref>) while the Premotor cortex is involved in reorienting attention (Rizzolatti et al., <xref ref-type="bibr" rid="B59">1987</xref>) and attention-deficit/hyperactivity disorder (Mostofsky et al., <xref ref-type="bibr" rid="B52">2002</xref>). All these clearly indicate that our new AI framework can discover potentially biologically meaningful results for regression studies.</p></sec></sec>
<sec sec-type="conclusions" id="s5">
<title>5. Conclusion</title>
<p>In this study, we proposed a novel interpretable graph learning framework for brain network regression analysis. We demonstrated that our new framework has better prediction performance than state-of-the-art graph learning methods in predicting young healthy subjects&#x00027; psychiatric scores. Additionally, we chose one of the psychiatric scores to demonstrate how this new framework can be used to study sex differences. Future work will focus on how to modify our framework for the signed graph data.</p></sec>
<sec sec-type="data-availability" id="s6">
<title>Data Availability Statement</title>
<p>Publicly available datasets were analyzed in this study. This data can be found here: <ext-link ext-link-type="uri" xlink:href="https://www.humanconnectome.org/study/hcp-young-adult/document/1200-subjects-data-release">https://www.humanconnectome.org/study/hcp-young-adult/document/1200-subjects-data-release</ext-link>.</p></sec>
<sec id="s7">
<title>Author Contributions</title>
<p>HT took charge of conception and design, method implementation, statistical analysis, and interpretation, as well as manuscript writing and revising. LZ took charge of project design, data preprocessing, analysis and interpretation, manuscript writing/revising. LG, XF, BQ, OA, YW, PT, HH, and AL took charge of experiment design, results discussion, and manuscript proofreading. All authors contributed to the article and approved the submitted version.</p></sec>
<sec sec-type="funding-information" id="s8">
<title>Funding</title>
<p>This study was partially supported by the National Institutes of Health (R01AG071243, R01MH125928, and U01AG068057) and National Science Foundation (IIS 2045848 and IIS 1837956).</p></sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec sec-type="disclaimer" id="s9">
<title>Publisher&#x00027;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec> </body>
<back>
<ack><p>We thank MGH-USC Consortium (Principal Investigators: Bruce R. Rosen, Arthur W. Toga, and Van Wedeen; U01MH093765), which was funded by the NIH Blueprint Initiative for Neuroscience Research grant; the National Institutes of Health grant P41EB015896; and the Instrumentation Grants S10RR023043, 1S10RR023401, 1S10RR019307, which provides the Human Connectome Project data for our work. We thank the Extreme Science and Engineering Discovery Environment (XSEDE), which was supported by National Science Foundation (NSF) grant number ACI-1548562 and NSF award number ACI-1445606, which provide the computation resources based on Pittsburgh Supercomputing Center (PSC) for part of our work.</p>
</ack>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Achenbach</surname> <given-names>T. M.</given-names></name> <name><surname>Rescorla</surname> <given-names>L.</given-names></name></person-group> (<year>2003</year>). <source>Manual for the Aseba Adult Forms</source> &#x00026; <italic>Profiles</italic>. <publisher-loc>Burlington, VT</publisher-loc>: <publisher-name>University of Vermont, Research Center for Children, Youth</publisher-name>.</citation>
</ref>
<ref id="B2">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ajilore</surname> <given-names>O.</given-names></name> <name><surname>Zhan</surname> <given-names>L.</given-names></name> <name><surname>GadElkarim</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>A.</given-names></name> <name><surname>Feusner</surname> <given-names>J.</given-names></name> <name><surname>Yang</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2013</year>). <article-title>Constructing the resting state structural connectome</article-title>. <source>Front. Neuroinform</source>. <volume>7</volume>:<fpage>30</fpage>. <pub-id pub-id-type="doi">10.3389/fninf.2013.00030</pub-id><pub-id pub-id-type="pmid">24409139</pub-id></citation></ref>
<ref id="B3">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bathelt</surname> <given-names>J.</given-names></name> <name><surname>O&#x00027;Reilly</surname> <given-names>H.</given-names></name> <name><surname>Clayden</surname> <given-names>J. D.</given-names></name> <name><surname>Cross</surname> <given-names>J. H.</given-names></name> <name><surname>de Haan</surname> <given-names>M.</given-names></name></person-group> (<year>2013</year>). <article-title>Functional brain network organisation of children between 2 and 5 years derived from reconstructed activity of cortical sources of high-density eeg recordings</article-title>. <source>Neuroimage</source> <volume>82</volume>, <fpage>595</fpage>&#x02013;<lpage>604</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.06.003</pub-id><pub-id pub-id-type="pmid">23769920</pub-id></citation></ref>
<ref id="B4">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Beaty</surname> <given-names>R. E.</given-names></name> <name><surname>Kenett</surname> <given-names>Y. N.</given-names></name> <name><surname>Christensen</surname> <given-names>A. P.</given-names></name> <name><surname>Rosenberg</surname> <given-names>M. D.</given-names></name> <name><surname>Benedek</surname> <given-names>M.</given-names></name> <name><surname>Chen</surname> <given-names>Q.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>Robust prediction of individual creative ability from brain functional connectivity</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>115</volume>, <fpage>1087</fpage>&#x02013;<lpage>1092</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1713532115</pub-id><pub-id pub-id-type="pmid">29339474</pub-id></citation></ref>
<ref id="B5">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Behrens</surname> <given-names>T. E.</given-names></name> <name><surname>Berg</surname> <given-names>H. J.</given-names></name> <name><surname>Jbabdi</surname> <given-names>S.</given-names></name> <name><surname>Rushworth</surname> <given-names>M. F.</given-names></name> <name><surname>Woolrich</surname> <given-names>M. W.</given-names></name></person-group> (<year>2007</year>). <article-title>Probabilistic diffusion tractography with multiple fibre orientations: what can we gain?</article-title> <source>Neuroimage</source> <volume>34</volume>, <fpage>144</fpage>&#x02013;<lpage>155</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2006.09.018</pub-id><pub-id pub-id-type="pmid">17070705</pub-id></citation></ref>
<ref id="B6">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Behrens</surname> <given-names>T. E.</given-names></name> <name><surname>Woolrich</surname> <given-names>M. W.</given-names></name> <name><surname>Jenkinson</surname> <given-names>M.</given-names></name> <name><surname>Johansen-Berg</surname> <given-names>H.</given-names></name> <name><surname>Nunes</surname> <given-names>R. G.</given-names></name> <name><surname>Clare</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2003</year>). <article-title>Characterization and propagation of uncertainty in diffusion-weighted mr imaging</article-title>. <source>Magnet. Reson. Med</source>. <volume>50</volume>, <fpage>1077</fpage>&#x02013;<lpage>1088</lpage>. <pub-id pub-id-type="doi">10.1002/mrm.10609</pub-id><pub-id pub-id-type="pmid">14587019</pub-id></citation></ref>
<ref id="B7">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Brown</surname> <given-names>C. J.</given-names></name> <name><surname>Moriarty</surname> <given-names>K. P.</given-names></name> <name><surname>Miller</surname> <given-names>S. P.</given-names></name> <name><surname>Booth</surname> <given-names>B. G.</given-names></name> <name><surname>Zwicker</surname> <given-names>J. G.</given-names></name> <name><surname>Grunau</surname> <given-names>R. E.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>&#x0201C;Prediction of brain network age and factors of delayed maturation in very preterm infants,&#x0201D;</article-title> in <source>International Conference on Medical Image Computing and Computer-Assisted Intervention</source> (<publisher-loc>Quebec City, QC</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>84</fpage>&#x02013;<lpage>91</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-319-66182-7_10</pub-id></citation>
</ref>
<ref id="B8">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cavanna</surname> <given-names>A. E.</given-names></name> <name><surname>Trimble</surname> <given-names>M. R.</given-names></name></person-group> (<year>2006</year>). <article-title>The precuneus: a review of its functional anatomy and behavioural correlates</article-title>. <source>Brain</source> <volume>129</volume>, <fpage>564</fpage>&#x02013;<lpage>583</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awl004</pub-id><pub-id pub-id-type="pmid">16399806</pub-id></citation></ref>
<ref id="B9">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>D.</given-names></name> <name><surname>Lin</surname> <given-names>Y.</given-names></name> <name><surname>Li</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>P.</given-names></name> <name><surname>Zhou</surname> <given-names>J.</given-names></name> <name><surname>Sun</surname> <given-names>X.</given-names></name></person-group> (<year>2020</year>). <article-title>&#x0201C;Measuring and relieving the over-smoothing problem for graph neural networks from the topological view,&#x0201D;</article-title> in <source>Proceedings of the AAAI Conference on Artificial Intelligence</source> (<publisher-loc>New York, NY</publisher-loc>: <publisher-name>ACM</publisher-name>), <fpage>3438</fpage>&#x02013;<lpage>3445</lpage>. <pub-id pub-id-type="doi">10.1609/aaai.v34i04.5747</pub-id></citation>
</ref>
<ref id="B10">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chennu</surname> <given-names>S.</given-names></name> <name><surname>Annen</surname> <given-names>J.</given-names></name> <name><surname>Wannez</surname> <given-names>S.</given-names></name> <name><surname>Thibaut</surname> <given-names>A.</given-names></name> <name><surname>Chatelle</surname> <given-names>C.</given-names></name> <name><surname>Cassol</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>Brain networks predict metabolism, diagnosis and prognosis at the bedside in disorders of consciousness</article-title>. <source>Brain</source> <volume>140</volume>, <fpage>2120</fpage>&#x02013;<lpage>2132</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awx163</pub-id><pub-id pub-id-type="pmid">28666351</pub-id></citation></ref>
<ref id="B11">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Craddock</surname> <given-names>R. C.</given-names></name> <name><surname>Tungaraza</surname> <given-names>R. L.</given-names></name> <name><surname>Milham</surname> <given-names>M. P.</given-names></name></person-group> (<year>2015</year>). <article-title>Connectomics and new approaches for analyzing human brain functional connectivity</article-title>. <source>Gigascience</source> <volume>4</volume>, <fpage>s13742</fpage>-s13015. <pub-id pub-id-type="doi">10.1186/s13742-015-0045-x</pub-id><pub-id pub-id-type="pmid">25810900</pub-id></citation></ref>
<ref id="B12">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Cui</surname> <given-names>H.</given-names></name> <name><surname>Dai</surname> <given-names>W.</given-names></name> <name><surname>Zhu</surname> <given-names>Y.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>He</surname> <given-names>L.</given-names></name> <name><surname>Yang</surname> <given-names>C.</given-names></name></person-group> (<year>2021</year>). <article-title>Brainnnexplainer: an interpretable graph neural network framework for brain network based disease analysis</article-title>. <source>arXiv[Preprint].</source> arXiv:2107.05097. <pub-id pub-id-type="doi">10.48550/arXiv.2107.05097</pub-id></citation>
</ref>
<ref id="B13">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dai</surname> <given-names>T.</given-names></name> <name><surname>Guo</surname> <given-names>Y.</given-names></name> <collab>Alzheimer&#x00027;s Disease Neuroimaging Initiative</collab></person-group> (<year>2017</year>). <article-title>Predicting individual brain functional connectivity using a Bayesian hierarchical model</article-title>. <source>Neuroimage</source> <volume>147</volume>, <fpage>772</fpage>&#x02013;<lpage>787</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2016.11.048</pub-id><pub-id pub-id-type="pmid">27915121</pub-id></citation></ref>
<ref id="B14">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Dehmamy</surname> <given-names>N.</given-names></name> <name><surname>Barab&#x000E1;si</surname> <given-names>A.-L.</given-names></name> <name><surname>Yu</surname> <given-names>R.</given-names></name></person-group> (<year>2019</year>). <article-title>&#x0201C;Understanding the representation power of graph neural networks in learning graph topology,&#x0201D;</article-title> in <source>Advances in Neural Information Processing Systems</source>, eds H. Wallach, H. Larochelle, A. Beygelzimer, F. d&#x00027;Alch&#x000E9;-Buc, E. Fox, and R. Garnett (Vancouver, BC), <fpage>15413</fpage>&#x02013;<lpage>15423</lpage>.</citation>
</ref>
<ref id="B15">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>D&#x000ED;az-Arteche</surname> <given-names>C.</given-names></name> <name><surname>Rakesh</surname> <given-names>D.</given-names></name></person-group> (<year>2020</year>). <article-title>Using neuroimaging to predict brain age: insights into typical and atypical development and risk for psychopathology</article-title>. <source>J. Neurophysiol</source>. <volume>124</volume>, <fpage>400</fpage>&#x02013;<lpage>403</lpage>. <pub-id pub-id-type="doi">10.1152/jn.00267.2020</pub-id><pub-id pub-id-type="pmid">32639858</pub-id></citation></ref>
<ref id="B16">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dickstein</surname> <given-names>S. G.</given-names></name> <name><surname>Bannon</surname> <given-names>K.</given-names></name> <name><surname>Xavier Castellanos</surname> <given-names>F.</given-names></name> <name><surname>Milham</surname> <given-names>M. P.</given-names></name></person-group> (<year>2006</year>). <article-title>The neural correlates of attention deficit hyperactivity disorder: an ale meta-analysis</article-title>. <source>J. Child Psychol. Psychiatry</source> <volume>47</volume>, <fpage>1051</fpage>&#x02013;<lpage>1062</lpage>. <pub-id pub-id-type="doi">10.1111/j.1469-7610.2006.01671.x</pub-id><pub-id pub-id-type="pmid">17073984</pub-id></citation></ref>
<ref id="B17">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Du</surname> <given-names>J.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Zhi</surname> <given-names>N.</given-names></name> <name><surname>Geng</surname> <given-names>J.</given-names></name> <name><surname>Cao</surname> <given-names>W.</given-names></name> <name><surname>Yu</surname> <given-names>L.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Structural brain network measures are superior to vascular burden scores in predicting early cognitive impairment in post stroke patients with small vessel disease</article-title>. <source>Neuroimage Clin</source>. <volume>22</volume>, <fpage>101712</fpage>. <pub-id pub-id-type="doi">10.1016/j.nicl.2019.101712</pub-id><pub-id pub-id-type="pmid">30772684</pub-id></citation></ref>
<ref id="B18">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Duffy</surname> <given-names>B. A.</given-names></name> <name><surname>Zhang</surname> <given-names>W.</given-names></name> <name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Zhao</surname> <given-names>L.</given-names></name> <name><surname>Law</surname> <given-names>M.</given-names></name> <name><surname>Toga</surname> <given-names>A. W.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Retrospective correction of motion artifact affected structural MRI images using deep learning of simulated motion</article-title>. <source>Neuroimage</source> <volume>230</volume>, <fpage>117756</fpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2021.117756</pub-id><pub-id pub-id-type="pmid">33460797</pub-id></citation></ref>
<ref id="B19">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eichele</surname> <given-names>T.</given-names></name> <name><surname>Debener</surname> <given-names>S.</given-names></name> <name><surname>Calhoun</surname> <given-names>V. D.</given-names></name> <name><surname>Specht</surname> <given-names>K.</given-names></name> <name><surname>Engel</surname> <given-names>A. K.</given-names></name> <name><surname>Hugdahl</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2008</year>). <article-title>Prediction of human errors by maladaptive changes in event-related brain networks</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>105</volume>, <fpage>6173</fpage>&#x02013;<lpage>6178</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.0708965105</pub-id><pub-id pub-id-type="pmid">18427123</pub-id></citation></ref>
<ref id="B20">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ester</surname> <given-names>M.</given-names></name> <name><surname>Kriegel</surname> <given-names>H.-P.</given-names></name> <name><surname>Sander</surname> <given-names>J.</given-names></name> <name><surname>Xu</surname> <given-names>X.</given-names></name></person-group> (<year>1996</year>). <article-title>&#x0201C;A density-based algorithm for discovering clusters in large spatial databases with noise,&#x0201D;</article-title> in <source>KDD&#x00027;96: Proceedings of the Second International Conference on Knowledge Discovery and Data Mining</source> (<publisher-loc>Portland, OR</publisher-loc>), <fpage>226</fpage>&#x02013;<lpage>231</lpage>.</citation>
</ref>
<ref id="B21">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fan</surname> <given-names>L.</given-names></name> <name><surname>Li</surname> <given-names>H.</given-names></name> <name><surname>Zhuo</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>Y.</given-names></name> <name><surname>Wang</surname> <given-names>J.</given-names></name> <name><surname>Chen</surname> <given-names>L.</given-names></name> <etal/></person-group>. (<year>2016</year>). <article-title>The human brainnetome atlas: a new brain atlas based on connectional architecture</article-title>. <source>Cereb. Cortex</source> <volume>26</volume>, <fpage>3508</fpage>&#x02013;<lpage>3526</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhw157</pub-id><pub-id pub-id-type="pmid">27230218</pub-id></citation></ref>
<ref id="B22">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Fey</surname> <given-names>M.</given-names></name> <name><surname>Lenssen</surname> <given-names>J. E.</given-names></name></person-group> (<year>2019</year>). <article-title>Fast graph representation learning with pytorch geometric</article-title>. <source>arXiv[Preprint].</source> arXiv:1903.02428. <pub-id pub-id-type="doi">10.48550/arXiv.1903.02428</pub-id></citation>
</ref>
<ref id="B23">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fischer</surname> <given-names>F. U.</given-names></name> <name><surname>Wolf</surname> <given-names>D.</given-names></name> <name><surname>Scheurich</surname> <given-names>A.</given-names></name> <name><surname>Fellgiebel</surname> <given-names>A.</given-names></name></person-group> (<year>2014</year>). <article-title>Association of structural global brain network properties with intelligence in normal aging</article-title>. <source>PLoS ONE</source> <volume>9</volume>, <fpage>e86258</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0086258</pub-id><pub-id pub-id-type="pmid">24465994</pub-id></citation></ref>
<ref id="B24">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Gao</surname> <given-names>H.</given-names></name> <name><surname>Wang</surname> <given-names>Z.</given-names></name> <name><surname>Ji</surname> <given-names>S.</given-names></name></person-group> (<year>2018</year>). <article-title>&#x0201C;Large-scale learnable graph convolutional networks,&#x0201D;</article-title> in <source>Proceedings of the 24th ACM SIGKDD International Conference on Knowledge Discovery &#x00026; Data Mining</source> (<publisher-loc>London</publisher-loc>), <fpage>1416</fpage>&#x02013;<lpage>1424</lpage>. <pub-id pub-id-type="doi">10.1145/3219819.3219947</pub-id><pub-id pub-id-type="pmid">33006927</pub-id></citation></ref>
<ref id="B25">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grosbras</surname> <given-names>M.-H.</given-names></name> <name><surname>Laird</surname> <given-names>A. R.</given-names></name> <name><surname>Paus</surname> <given-names>T.</given-names></name></person-group> (<year>2005</year>). <article-title>Cortical regions involved in eye movements, shifts of attention, and gaze perception</article-title>. <source>Hum. Brain Mapp</source>. <volume>25</volume>, <fpage>140</fpage>&#x02013;<lpage>154</lpage>. <pub-id pub-id-type="doi">10.1002/hbm.20145</pub-id><pub-id pub-id-type="pmid">15846814</pub-id></citation></ref>
<ref id="B26">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gusnard</surname> <given-names>D. A.</given-names></name> <name><surname>Raichle</surname> <given-names>M. E.</given-names></name></person-group> (<year>2001</year>). <article-title>Searching for a baseline: functional imaging and the resting human brain</article-title>. <source>Nat. Rev. Neurosci</source>. <volume>2</volume>, <fpage>685</fpage>&#x02013;<lpage>694</lpage>. <pub-id pub-id-type="doi">10.1038/35094500</pub-id><pub-id pub-id-type="pmid">11584306</pub-id></citation></ref>
<ref id="B27">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hahn</surname> <given-names>B.</given-names></name> <name><surname>Ross</surname> <given-names>T. J.</given-names></name> <name><surname>Stein</surname> <given-names>E. A.</given-names></name></person-group> (<year>2007</year>). <article-title>Cingulate activation increases dynamically with response speed under stimulus unpredictability</article-title>. <source>Cereb. Cortex</source> <volume>17</volume>, <fpage>1664</fpage>&#x02013;<lpage>1671</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhl075</pub-id><pub-id pub-id-type="pmid">16963517</pub-id></citation></ref>
<ref id="B28">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Hamilton</surname> <given-names>W. L.</given-names></name> <name><surname>Ying</surname> <given-names>R.</given-names></name> <name><surname>Leskovec</surname> <given-names>J.</given-names></name></person-group> (<year>2017</year>). <article-title>&#x0201C;Inductive representation learning on large graphs,&#x0201D;</article-title> in <source>Proceedings of the 31st International Conference on Neural Information Processing Systems</source>, eds I. Guyon, U. Von Luxburg, S. Bengio, H. Wallach, R. Fergus, S. Vishwanathan, and R. Garnett (Long Beach, CA), <fpage>1025</fpage>&#x02013;<lpage>1035</lpage>.<pub-id pub-id-type="pmid">34111002</pub-id></citation></ref>
<ref id="B29">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hampson</surname> <given-names>M.</given-names></name> <name><surname>Driesen</surname> <given-names>N. R.</given-names></name> <name><surname>Skudlarski</surname> <given-names>P.</given-names></name> <name><surname>Gore</surname> <given-names>J. C.</given-names></name> <name><surname>Constable</surname> <given-names>R. T.</given-names></name></person-group> (<year>2006</year>). <article-title>Brain connectivity related to working memory performance</article-title>. <source>J. Neurosci</source>. <volume>26</volume>, <fpage>13338</fpage>&#x02013;<lpage>13343</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.3408-06.2006</pub-id><pub-id pub-id-type="pmid">17182784</pub-id></citation></ref>
<ref id="B30">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>van den Heuvel</surname> <given-names>M. P.</given-names></name> <name><surname>Sporns</surname> <given-names>O.</given-names></name></person-group> (<year>2013</year>). <article-title>Network hubs in the human brain</article-title>. <source>Trends Cogn. Sci</source>. <volume>17</volume>, <fpage>683</fpage>&#x02013;<lpage>696</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2013.09.012</pub-id><pub-id pub-id-type="pmid">24231140</pub-id></citation></ref>
<ref id="B31">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>C.</given-names></name> <name><surname>Ju</surname> <given-names>R.</given-names></name> <name><surname>Shen</surname> <given-names>Y.</given-names></name> <name><surname>Zhou</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>Q.</given-names></name></person-group> (<year>2016</year>). <article-title>&#x0201C;Clinical decision support for Alzheimer&#x00027;s disease based on deep learning and brain network,&#x0201D;</article-title> in <source>2016 IEEE International Conference on Communications (ICC)</source> (<publisher-loc>Kuala Lumpur</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>1</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1109/ICC.2016.7510831</pub-id></citation>
</ref>
<ref id="B32">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>S.</given-names></name> <name><surname>Belliveau</surname> <given-names>J. W.</given-names></name> <name><surname>Tengshe</surname> <given-names>C.</given-names></name> <name><surname>Ahveninen</surname> <given-names>J.</given-names></name></person-group> (<year>2012</year>). <article-title>Brain networks of novelty-driven involuntary and cued voluntary auditory attention shifting</article-title>. <source>PLoS ONE</source> <volume>7</volume>, <fpage>e44062</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0044062</pub-id><pub-id pub-id-type="pmid">22937153</pub-id></citation></ref>
<ref id="B33">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ju</surname> <given-names>R.</given-names></name> <name><surname>Hu</surname> <given-names>C.</given-names></name> <name><surname>Zhou</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>Q.</given-names></name></person-group> (<year>2017</year>). <article-title>Early diagnosis of Alzheimer&#x00027;s disease based on resting-state brain networks and deep learning</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinform</source>. <volume>16</volume>, <fpage>244</fpage>&#x02013;<lpage>257</lpage>. <pub-id pub-id-type="doi">10.1109/TCBB.2017.2776910</pub-id><pub-id pub-id-type="pmid">29989989</pub-id></citation></ref>
<ref id="B34">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kim</surname> <given-names>H.</given-names></name> <name><surname>Irimia</surname> <given-names>A.</given-names></name> <name><surname>Hobel</surname> <given-names>S. M.</given-names></name> <name><surname>Pogosyan</surname> <given-names>M.</given-names></name> <name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Petrosyan</surname> <given-names>P.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>The loni qc system: a semi-automated, web-based and freely-available environment for the comprehensive quality control of neuroimaging data</article-title>. <source>Front. Neuroinform</source>. <volume>13</volume>, <fpage>60</fpage>. <pub-id pub-id-type="doi">10.3389/fninf.2019.00060</pub-id><pub-id pub-id-type="pmid">31555116</pub-id></citation></ref>
<ref id="B35">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Kingma</surname> <given-names>D. P.</given-names></name> <name><surname>Ba</surname> <given-names>J.</given-names></name></person-group> (<year>2015</year>). <article-title>&#x0201C;Adam: a method for stochastic optimization,&#x0201D;</article-title> in <source>International Conference on Learning Representations</source> (<publisher-loc>San Diego, CA</publisher-loc>).</citation>
</ref>
<ref id="B36">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Kipf</surname> <given-names>T. N.</given-names></name> <name><surname>Welling</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Semi-supervised classification with graph convolutional networks</article-title>. <source>arXiv[Preprint].</source> arXiv:1609.02907. <pub-id pub-id-type="doi">10.48550/arXiv.1609.02907</pub-id></citation>
</ref>
<ref id="B37">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kuo</surname> <given-names>C.-Y.</given-names></name> <name><surname>Lee</surname> <given-names>P.-L.</given-names></name> <name><surname>Hung</surname> <given-names>S.-C.</given-names></name> <name><surname>Liu</surname> <given-names>L.-K.</given-names></name> <name><surname>Lee</surname> <given-names>W.-J.</given-names></name> <name><surname>Chung</surname> <given-names>C.-P.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Large-scale structural covariance networks predict age in middle-to-late adulthood: a novel brain aging biomarker</article-title>. <source>Cereb. Cortex</source> <volume>30</volume>, <fpage>5844</fpage>&#x02013;<lpage>5862</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhaa161</pub-id><pub-id pub-id-type="pmid">32572452</pub-id></citation></ref>
<ref id="B38">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>J.</given-names></name> <name><surname>Lee</surname> <given-names>I.</given-names></name> <name><surname>Kang</surname> <given-names>J.</given-names></name></person-group> (<year>2019</year>). <article-title>&#x0201C;Self-attention graph pooling,&#x0201D;</article-title> in <source>International Conference on Machine Learning</source> (<publisher-loc>Long Beach, CA</publisher-loc>: <publisher-name>PMLR</publisher-name>), <fpage>3734</fpage>&#x02013;<lpage>3743</lpage>.</citation>
</ref>
<ref id="B39">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leech</surname> <given-names>R.</given-names></name> <name><surname>Kamourieh</surname> <given-names>S.</given-names></name> <name><surname>Beckmann</surname> <given-names>C. F.</given-names></name> <name><surname>Sharp</surname> <given-names>D. J.</given-names></name></person-group> (<year>2011</year>). <article-title>Fractionating the default mode network: distinct contributions of the ventral and dorsal posterior cingulate cortex to cognitive control</article-title>. <source>J. Neurosci</source>. <volume>31</volume>, <fpage>3217</fpage>&#x02013;<lpage>3224</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.5626-10.2011</pub-id><pub-id pub-id-type="pmid">21368033</pub-id></citation></ref>
<ref id="B40">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Leech</surname> <given-names>R.</given-names></name> <name><surname>Sharp</surname> <given-names>D. J.</given-names></name></person-group> (<year>2014</year>). <article-title>The role of the posterior cingulate cortex in cognition and disease</article-title>. <source>Brain</source> <volume>137</volume>, <fpage>12</fpage>&#x02013;<lpage>32</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awt162</pub-id><pub-id pub-id-type="pmid">23869106</pub-id></citation></ref>
<ref id="B41">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lehmann</surname> <given-names>B.</given-names></name> <name><surname>Henson</surname> <given-names>R.</given-names></name> <name><surname>Geerligs</surname> <given-names>L.</given-names></name> <name><surname>Can</surname> <given-names>C.</given-names></name> <name><surname>White</surname> <given-names>S.</given-names></name></person-group> (<year>2021</year>). <article-title>Characterising group-level brain connectivity: a framework using Bayesian exponential random graph models</article-title>. <source>Neuroimage</source> <volume>225</volume>, <fpage>117480</fpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2020.117480</pub-id><pub-id pub-id-type="pmid">33099009</pub-id></citation></ref>
<ref id="B42">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lehrer</surname> <given-names>J.</given-names></name></person-group> (<year>2009</year>). <article-title>Neuroscience: making connections</article-title>. <source>Nat. News</source> <volume>457</volume>, <fpage>524</fpage>&#x02013;<lpage>527</lpage>. <pub-id pub-id-type="doi">10.1038/457524a</pub-id><pub-id pub-id-type="pmid">19177102</pub-id></citation></ref>
<ref id="B43">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Deng</surname> <given-names>C.</given-names></name> <name><surname>Zhan</surname> <given-names>L.</given-names></name> <name><surname>Liu</surname> <given-names>W.</given-names></name></person-group> (<year>2020</year>). <article-title>&#x0201C;Vulnerability vs. reliability: disentangled adversarial examples for cross-modal learning,&#x0201D;</article-title> in <source>Proceedings of the 26th ACM SIGKDD International Conference on Knowledge Discovery &#x00026; Data Mining</source> (<publisher-loc>San Diego, CA</publisher-loc>), <fpage>421</fpage>&#x02013;<lpage>429</lpage>. <pub-id pub-id-type="doi">10.1145/3394486.3403084</pub-id></citation>
</ref>
<ref id="B44">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>Q.</given-names></name> <name><surname>Han</surname> <given-names>Z.</given-names></name> <name><surname>Wu</surname> <given-names>X.-M.</given-names></name></person-group> (<year>2018</year>). <article-title>&#x0201C;Deeper insights into graph convolutional networks for semi-supervised learning,&#x0201D;</article-title> in <source>Thirty-Second AAAI Conference on Artificial Intelligence</source> (<publisher-loc>New Orleans, LA</publisher-loc>: <publisher-name>AAAI</publisher-name>). <pub-id pub-id-type="doi">10.1609/aaai.v32i1.11604</pub-id></citation>
</ref>
<ref id="B45">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Li</surname> <given-names>Y.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name></person-group> (<year>2017</year>). <article-title>&#x0201C;Predicting clinical outcomes of Alzheimer&#x00027;s disease from complex brain networks,&#x0201D;</article-title> in <source>International Conference on Advanced Data Mining and Applications</source> (<publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>519</fpage>&#x02013;<lpage>525</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-319-69179-4_36</pub-id></citation>
</ref>
<ref id="B46">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Zhou</surname> <given-names>Y.</given-names></name> <name><surname>Dvornek</surname> <given-names>N.</given-names></name> <name><surname>Zhang</surname> <given-names>M.</given-names></name> <name><surname>Gao</surname> <given-names>S.</given-names></name> <name><surname>Zhuang</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Braingnn: Interpretable brain graph neural network for fmri analysis</article-title>. <source>Med. Image Anal</source>. <volume>74</volume>, <fpage>102233</fpage>. <pub-id pub-id-type="doi">10.1016/j.media.2021.102233</pub-id><pub-id pub-id-type="pmid">34655865</pub-id></citation></ref>
<ref id="B47">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>Y.</given-names></name> <name><surname>Qian</surname> <given-names>B.</given-names></name> <name><surname>Zhang</surname> <given-names>X.</given-names></name> <name><surname>Liu</surname> <given-names>H.</given-names></name></person-group> (<year>2020</year>). <article-title>Graph neural network-based diagnosis prediction</article-title>. <source>Big Data</source> <volume>8</volume>, <fpage>379</fpage>&#x02013;<lpage>390</lpage>. <pub-id pub-id-type="doi">10.1089/big.2020.0070</pub-id><pub-id pub-id-type="pmid">32783631</pub-id></citation></ref>
<ref id="B48">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>Y.</given-names></name> <name><surname>Tarlow</surname> <given-names>D.</given-names></name> <name><surname>Brockschmidt</surname> <given-names>M.</given-names></name> <name><surname>Zemel</surname> <given-names>R.</given-names></name></person-group> (<year>2015</year>). <article-title>Gated graph sequence neural networks</article-title>. <source>arXiv[Preprint].</source> arXiv:1511.05493. <pub-id pub-id-type="doi">10.48550/arXiv.1511.05493</pub-id></citation>
</ref>
<ref id="B49">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lin</surname> <given-names>M.</given-names></name> <name><surname>Chen</surname> <given-names>Q.</given-names></name> <name><surname>Yan</surname> <given-names>S.</given-names></name></person-group> (<year>2013</year>). <article-title>Network in network</article-title>. <source>arXiv[Preprint].</source> arXiv:1312.4400. <pub-id pub-id-type="doi">10.48550/arXiv.1312.4400</pub-id></citation>
</ref>
<ref id="B50">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Mattar</surname> <given-names>M. G.</given-names></name> <name><surname>Bassett</surname> <given-names>D. S.</given-names></name></person-group> (<year>2019</year>). <article-title>&#x0201C;Brain network architecture: implications for human learning,&#x0201D;</article-title> in <source>Network Science in Cognitive Psychology</source>, ed M. S. Vitevitch (<publisher-name>Routledge</publisher-name>), <fpage>30</fpage>&#x02013;<lpage>44</lpage>. <pub-id pub-id-type="doi">10.4324/9780367853259-3</pub-id></citation>
</ref>
<ref id="B51">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mirakhorli</surname> <given-names>J.</given-names></name> <name><surname>Amindavar</surname> <given-names>H.</given-names></name> <name><surname>Mirakhorli</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). <article-title>A new method to predict anomaly in brain network based on graph deep learning</article-title>. <source>Rev. Neurosci</source>. <volume>31</volume>, <fpage>681</fpage>&#x02013;<lpage>689</lpage>. <pub-id pub-id-type="doi">10.1515/revneuro-2019-0108</pub-id><pub-id pub-id-type="pmid">32678803</pub-id></citation></ref>
<ref id="B52">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mostofsky</surname> <given-names>S. H.</given-names></name> <name><surname>Cooper</surname> <given-names>K. L.</given-names></name> <name><surname>Kates</surname> <given-names>W. R.</given-names></name> <name><surname>Denckla</surname> <given-names>M. B.</given-names></name> <name><surname>Kaufmann</surname> <given-names>W. E.</given-names></name></person-group> (<year>2002</year>). <article-title>Smaller prefrontal and premotor volumes in boys with attention-deficit/hyperactivity disorder</article-title>. <source>Biol. Psychiatry</source> <volume>52</volume>, <fpage>785</fpage>&#x02013;<lpage>794</lpage>. <pub-id pub-id-type="doi">10.1016/S0006-3223(02)01412-9</pub-id><pub-id pub-id-type="pmid">12372650</pub-id></citation></ref>
<ref id="B53">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Narayanan</surname> <given-names>N. S.</given-names></name> <name><surname>Laubach</surname> <given-names>M.</given-names></name></person-group> (<year>2006</year>). <article-title>Top-down control of motor cortex ensembles by dorsomedial prefrontal cortex</article-title>. <source>Neuron</source> <volume>52</volume>, <fpage>921</fpage>&#x02013;<lpage>931</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuron.2006.10.021</pub-id><pub-id pub-id-type="pmid">17145511</pub-id></citation></ref>
<ref id="B54">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ng</surname> <given-names>A. Y.</given-names></name> <name><surname>Jordan</surname> <given-names>M. I.</given-names></name> <name><surname>Weiss</surname> <given-names>Y.</given-names></name></person-group> (<year>2002</year>). <article-title>&#x0201C;On spectral clustering: analysis and an algorithm,&#x0201D;</article-title> in <source>Advances in Neural Information Processing Systems</source>, eds T. Dietterich, S. Becker, and Z. Ghahramani (Vancouver, BC), <fpage>849</fpage>&#x02013;<lpage>856</lpage>.</citation>
</ref>
<ref id="B55">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Nystrom</surname> <given-names>N. A.</given-names></name> <name><surname>Levine</surname> <given-names>M. J.</given-names></name> <name><surname>Roskies</surname> <given-names>R. Z.</given-names></name> <name><surname>Scott</surname> <given-names>J. R.</given-names></name></person-group> (<year>2015</year>). <article-title>&#x0201C;Bridges: a uniquely flexible HPC resource for new communities and data analytics,&#x0201D;</article-title> in <source>Proceedings of the 2015 XSEDE Conference: Scientific Advancements Enabled by Enhanced Cyberinfrastructure</source> (<publisher-loc>St. Louis, MO</publisher-loc>), <fpage>1</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1145/2792745.2792775</pub-id></citation>
</ref>
<ref id="B56">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Park</surname> <given-names>J.</given-names></name> <name><surname>Wood</surname> <given-names>J.</given-names></name> <name><surname>Bondi</surname> <given-names>C.</given-names></name> <name><surname>Del Arco</surname> <given-names>A.</given-names></name> <name><surname>Moghaddam</surname> <given-names>B.</given-names></name></person-group> (<year>2016</year>). <article-title>Anxiety evokes hypofrontality and disrupts rule-relevant encoding by dorsomedial prefrontal cortex neurons</article-title>. <source>J. Neurosci</source>. <volume>36</volume>, <fpage>3322</fpage>&#x02013;<lpage>3335</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.4250-15.2016</pub-id><pub-id pub-id-type="pmid">26985040</pub-id></citation></ref>
<ref id="B57">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Paszke</surname> <given-names>A.</given-names></name> <name><surname>Gross</surname> <given-names>S.</given-names></name> <name><surname>Massa</surname> <given-names>F.</given-names></name> <name><surname>Lerer</surname> <given-names>A.</given-names></name> <name><surname>Bradbury</surname> <given-names>J.</given-names></name> <name><surname>Chanan</surname> <given-names>G.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>&#x0201C;Pytorch: an imperative style, high-performance deep learning library,&#x0201D;</article-title> in <source>Advances in Neural Information Processing Systems Vol. 32</source>, eds H. Wallach, H. Larochelle, A. Beygelzimer, F. d&#x00027;Alch&#x000E9;-Buc, E. Fox, R. Garnett (Vancouver, BC), <fpage>8026</fpage>&#x02013;<lpage>8037</lpage>.</citation>
</ref>
<ref id="B58">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Power</surname> <given-names>J. D.</given-names></name> <name><surname>Fair</surname> <given-names>D. A.</given-names></name> <name><surname>Schlaggar</surname> <given-names>B. L.</given-names></name> <name><surname>Petersen</surname> <given-names>S. E.</given-names></name></person-group> (<year>2010</year>). <article-title>The development of human functional brain networks</article-title>. <source>Neuron</source> <volume>67</volume>, <fpage>735</fpage>&#x02013;<lpage>748</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuron.2010.08.017</pub-id><pub-id pub-id-type="pmid">20826306</pub-id></citation></ref>
<ref id="B59">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rizzolatti</surname> <given-names>G.</given-names></name> <name><surname>Riggio</surname> <given-names>L.</given-names></name> <name><surname>Dascola</surname> <given-names>I.</given-names></name> <name><surname>Umilt&#x000E1;</surname> <given-names>C.</given-names></name></person-group> (<year>1987</year>). <article-title>Reorienting attention across the horizontal and vertical meridians: evidence in favor of a premotor theory of attention</article-title>. <source>Neuropsychologia</source> <volume>25</volume>, <fpage>31</fpage>&#x02013;<lpage>40</lpage>. <pub-id pub-id-type="doi">10.1016/0028-3932(87)90041-8</pub-id><pub-id pub-id-type="pmid">3574648</pub-id></citation></ref>
<ref id="B60">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rusinek</surname> <given-names>H.</given-names></name> <name><surname>De Santi</surname> <given-names>S.</given-names></name> <name><surname>Frid</surname> <given-names>D.</given-names></name> <name><surname>Tsui</surname> <given-names>W.-H.</given-names></name> <name><surname>Tarshish</surname> <given-names>C. Y.</given-names></name> <name><surname>Convit</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2003</year>). <article-title>Regional brain atrophy rate predicts future cognitive decline: 6-year longitudinal mr imaging study of normal aging</article-title>. <source>Radiology</source> <volume>229</volume>, <fpage>691</fpage>&#x02013;<lpage>696</lpage>. <pub-id pub-id-type="doi">10.1148/radiol.2293021299</pub-id><pub-id pub-id-type="pmid">14657306</pub-id></citation></ref>
<ref id="B61">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sabuncu</surname> <given-names>M. R.</given-names></name> <name><surname>Konukoglu</surname> <given-names>E.</given-names></name> <name><surname>Initiative</surname> <given-names>A. D. N.</given-names></name> <etal/></person-group>. (<year>2015</year>). <article-title>Clinical prediction from structural brain mri scans: a large-scale empirical study</article-title>. <source>Neuroinformatics</source> <volume>13</volume>, <fpage>31</fpage>&#x02013;<lpage>46</lpage>. <pub-id pub-id-type="doi">10.1007/s12021-014-9238-1</pub-id><pub-id pub-id-type="pmid">25048627</pub-id></citation></ref>
<ref id="B62">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Seo</surname> <given-names>S.</given-names></name> <name><surname>Mohr</surname> <given-names>J.</given-names></name> <name><surname>Beck</surname> <given-names>A.</given-names></name> <name><surname>W&#x000FC;stenberg</surname> <given-names>T.</given-names></name> <name><surname>Heinz</surname> <given-names>A.</given-names></name> <name><surname>Obermayer</surname> <given-names>K.</given-names></name></person-group> (<year>2015</year>). <article-title>Predicting the future relapse of alcohol-dependent patients from structural and functional brain images</article-title>. <source>Addict. Biol</source>. <volume>20</volume>, <fpage>1042</fpage>&#x02013;<lpage>1055</lpage>. <pub-id pub-id-type="doi">10.1111/adb.12302</pub-id><pub-id pub-id-type="pmid">26435383</pub-id></citation></ref>
<ref id="B63">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Shchur</surname> <given-names>O.</given-names></name> <name><surname>Mumme</surname> <given-names>M.</given-names></name> <name><surname>Bojchevski</surname> <given-names>A.</given-names></name> <name><surname>G&#x000FC;nnemann</surname> <given-names>S.</given-names></name></person-group> (<year>2018</year>). <article-title>Pitfalls of graph neural network evaluation</article-title>. <source>arXiv[Preprint].</source> arXiv:1811.05868. <pub-id pub-id-type="doi">10.48550/arXiv.1811.05868</pub-id></citation>
</ref>
<ref id="B64">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Simpson</surname> <given-names>S. L.</given-names></name> <name><surname>Hayasaka</surname> <given-names>S.</given-names></name> <name><surname>Laurienti</surname> <given-names>P. J.</given-names></name></person-group> (<year>2011</year>). <article-title>Exponential random graph modeling for complex brain networks</article-title>. <source>PLoS ONE</source> <volume>6</volume>, <fpage>e20039</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0020039</pub-id><pub-id pub-id-type="pmid">21647450</pub-id></citation></ref>
<ref id="B65">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Simpson</surname> <given-names>S. L.</given-names></name> <name><surname>Moussa</surname> <given-names>M. N.</given-names></name> <name><surname>Laurienti</surname> <given-names>P. J.</given-names></name></person-group> (<year>2012</year>). <article-title>An exponential random graph modeling approach to creating group-based representative whole-brain connectivity networks</article-title>. <source>Neuroimage</source> <volume>60</volume>, <fpage>1117</fpage>&#x02013;<lpage>1126</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2012.01.071</pub-id><pub-id pub-id-type="pmid">22281670</pub-id></citation></ref>
<ref id="B66">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sporns</surname> <given-names>O.</given-names></name></person-group> (<year>2011</year>). <article-title>The human connectome: a complex network</article-title>. <source>Ann. N. Y. Acad. Sci</source>. <volume>1224</volume>, <fpage>109</fpage>&#x02013;<lpage>125</lpage>. <pub-id pub-id-type="doi">10.1111/j.1749-6632.2010.05888.x</pub-id><pub-id pub-id-type="pmid">21251014</pub-id></citation></ref>
<ref id="B67">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sporns</surname> <given-names>O.</given-names></name></person-group> (<year>2013</year>). <article-title>The human connectome: origins and challenges</article-title>. <source>Neuroimage</source> <volume>80</volume>, <fpage>53</fpage>&#x02013;<lpage>61</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.03.023</pub-id><pub-id pub-id-type="pmid">23528922</pub-id></citation></ref>
<ref id="B68">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sporns</surname> <given-names>O.</given-names></name> <name><surname>Chialvo</surname> <given-names>D. R.</given-names></name> <name><surname>Kaiser</surname> <given-names>M.</given-names></name> <name><surname>Hilgetag</surname> <given-names>C. C.</given-names></name></person-group> (<year>2004</year>). <article-title>Organization, development and function of complex brain networks</article-title>. <source>Trends Cogn. Sci</source>. <volume>8</volume>, <fpage>418</fpage>&#x02013;<lpage>425</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2004.07.008</pub-id><pub-id pub-id-type="pmid">15350243</pub-id></citation></ref>
<ref id="B69">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sz&#x000E9;kely</surname> <given-names>G. J.</given-names></name> <name><surname>Rizzo</surname> <given-names>M. L.</given-names></name></person-group> (<year>2009</year>). <article-title>Brownian distance covariance</article-title>. <source>Ann. Appl. Stat</source>. <volume>3</volume>, <fpage>1236</fpage>&#x02013;<lpage>1265</lpage>. <pub-id pub-id-type="doi">10.1214/09-AOAS312</pub-id><pub-id pub-id-type="pmid">20574547</pub-id></citation></ref>
<ref id="B70">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sz&#x000E9;kely</surname> <given-names>G. J.</given-names></name> <name><surname>Rizzo</surname> <given-names>M. L.</given-names></name> <name><surname>Bakirov</surname> <given-names>N. K.</given-names></name></person-group> (<year>2007</year>). <article-title>Measuring and testing dependence by correlation of distances</article-title>. <source>Ann. Stat</source>. <volume>35</volume>, <fpage>2769</fpage>&#x02013;<lpage>2794</lpage>. <pub-id pub-id-type="doi">10.1214/009053607000000505</pub-id></citation>
</ref>
<ref id="B71">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Guo</surname> <given-names>L.</given-names></name> <name><surname>Dennis</surname> <given-names>E.</given-names></name> <name><surname>Thompson</surname> <given-names>P. M.</given-names></name> <name><surname>Huang</surname> <given-names>H.</given-names></name> <name><surname>Ajilore</surname> <given-names>O.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>&#x0201C;Classifying stages of mild cognitive impairment via augmented graph embedding,&#x0201D;</article-title> in <source>Multimodal Brain Image Analysis and Mathematical Foundations of Computational Anatomy</source> (<publisher-loc>Shenzhen</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>30</fpage>&#x02013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-33226-6_4</pub-id></citation>
</ref>
<ref id="B72">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Guo</surname> <given-names>L.</given-names></name> <name><surname>Fu</surname> <given-names>X.</given-names></name> <name><surname>Qu</surname> <given-names>B.</given-names></name> <name><surname>Thompson</surname> <given-names>P. M.</given-names></name> <name><surname>Huang</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>&#x0201C;Hierarchical brain embedding using explainable graph learning,&#x0201D;</article-title> in <source>2022 IEEE 19th International Symposium on Biomedical Imaging (ISBI)</source> (<publisher-loc>Kolkata</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>1</fpage>&#x02013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1109/ISBI52829.2022.9761543</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B73">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>H.</given-names></name> <name><surname>Ma</surname> <given-names>G.</given-names></name> <name><surname>He</surname> <given-names>L.</given-names></name> <name><surname>Huang</surname> <given-names>H.</given-names></name> <name><surname>Zhan</surname> <given-names>L.</given-names></name></person-group> (<year>2021</year>). <article-title>Commpool: an interpretable graph pooling framework for hierarchical graph representation learning</article-title>. <source>Neural Netw</source>. <volume>143</volume>, <fpage>669</fpage>&#x02013;<lpage>677</lpage>. <pub-id pub-id-type="doi">10.1016/j.neunet.2021.07.028</pub-id><pub-id pub-id-type="pmid">34375808</pub-id></citation></ref>
<ref id="B74">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tomlinson</surname> <given-names>C. E.</given-names></name> <name><surname>Laurienti</surname> <given-names>P. J.</given-names></name> <name><surname>Lyday</surname> <given-names>R. G.</given-names></name> <name><surname>Simpson</surname> <given-names>S. L.</given-names></name></person-group> (<year>2021</year>). <article-title>A regression framework for brain network distance metrics</article-title>. <source>Netw. Neurosci</source>. <volume>6</volume>, <fpage>49</fpage>&#x02013;<lpage>68</lpage>. <pub-id pub-id-type="doi">10.1101/2021.02.26.432910</pub-id><pub-id pub-id-type="pmid">35350586</pub-id></citation></ref>
<ref id="B75">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Towns</surname> <given-names>J.</given-names></name> <name><surname>Cockerill</surname> <given-names>T.</given-names></name> <name><surname>Dahan</surname> <given-names>M.</given-names></name> <name><surname>Foster</surname> <given-names>I.</given-names></name> <name><surname>Gaither</surname> <given-names>K.</given-names></name> <name><surname>Grimshaw</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2014</year>). <article-title>Xsede: accelerating scientific discovery</article-title>. <source>Comput. Sci. Eng</source>. <volume>16</volume>, <fpage>62</fpage>&#x02013;<lpage>74</lpage>. <pub-id pub-id-type="doi">10.1109/MCSE.2014.80</pub-id></citation>
</ref>
<ref id="B76">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Uddin</surname> <given-names>L. Q.</given-names></name> <name><surname>Supekar</surname> <given-names>K.</given-names></name> <name><surname>Lynch</surname> <given-names>C. J.</given-names></name> <name><surname>Khouzam</surname> <given-names>A.</given-names></name> <name><surname>Phillips</surname> <given-names>J.</given-names></name> <name><surname>Feinstein</surname> <given-names>C.</given-names></name> <etal/></person-group>. (<year>2013</year>). <article-title>Salience network-based classification and prediction of symptom severity in children with autism</article-title>. <source>JAMA Psychiatry</source> <volume>70</volume>, <fpage>869</fpage>&#x02013;<lpage>879</lpage>. <pub-id pub-id-type="doi">10.1001/jamapsychiatry.2013.104</pub-id><pub-id pub-id-type="pmid">23803651</pub-id></citation></ref>
<ref id="B77">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Den Heuvel</surname> <given-names>M. P.</given-names></name> <name><surname>Kahn</surname> <given-names>R. S.</given-names></name> <name><surname>Go&#x000F1;i</surname> <given-names>J.</given-names></name> <name><surname>Sporns</surname> <given-names>O.</given-names></name></person-group> (<year>2012</year>). <article-title>High-cost, high-capacity backbone for global brain communication</article-title>. <source>Proc. Natl. Acad. Sci. U.S.A</source>. <volume>109</volume>, <fpage>11372</fpage>&#x02013;<lpage>11377</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.1203593109</pub-id><pub-id pub-id-type="pmid">22711833</pub-id></citation></ref>
<ref id="B78">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Essen</surname> <given-names>D. C.</given-names></name> <name><surname>Smith</surname> <given-names>S. M.</given-names></name> <name><surname>Barch</surname> <given-names>D. M.</given-names></name> <name><surname>Behrens</surname> <given-names>T. E.</given-names></name> <name><surname>Yacoub</surname> <given-names>E.</given-names></name> <name><surname>Ugurbil</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2013</year>). <article-title>The wu-minn human connectome project: an overview</article-title>. <source>Neuroimage</source> <volume>80</volume>, <fpage>62</fpage>&#x02013;<lpage>79</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.05.041</pub-id><pub-id pub-id-type="pmid">23684880</pub-id></citation></ref>
<ref id="B79">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Varoquaux</surname> <given-names>G.</given-names></name> <name><surname>Craddock</surname> <given-names>R. C.</given-names></name></person-group> (<year>2013</year>). <article-title>Learning and comparing functional connectomes across subjects</article-title>. <source>Neuroimage</source> <volume>80</volume>, <fpage>405</fpage>&#x02013;<lpage>415</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2013.04.007</pub-id><pub-id pub-id-type="pmid">23583357</pub-id></citation></ref>
<ref id="B80">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Veli&#x0010D;kovi&#x00107;</surname> <given-names>P.</given-names></name> <name><surname>Cucurull</surname> <given-names>G.</given-names></name> <name><surname>Casanova</surname> <given-names>A.</given-names></name> <name><surname>Romero</surname> <given-names>A.</given-names></name> <name><surname>Lio</surname> <given-names>P.</given-names></name> <name><surname>Bengio</surname> <given-names>Y.</given-names></name></person-group> (<year>2017</year>). <article-title>Graph attention networks</article-title>. <source>arXiv[Preprint].</source> arXiv:1710.10903. <pub-id pub-id-type="doi">10.48550/arXiv.1710.10903</pub-id></citation>
</ref>
<ref id="B81">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Venkatraman</surname> <given-names>V.</given-names></name> <name><surname>Rosati</surname> <given-names>A. G.</given-names></name> <name><surname>Taren</surname> <given-names>A. A.</given-names></name> <name><surname>Huettel</surname> <given-names>S. A.</given-names></name></person-group> (<year>2009</year>). <article-title>Resolving response, decision, and strategic control: evidence for a functional topography in dorsomedial prefrontal cortex</article-title>. <source>J. Neurosci</source>. <volume>29</volume>, <fpage>13158</fpage>&#x02013;<lpage>13164</lpage>. <pub-id pub-id-type="doi">10.1523/JNEUROSCI.2708-09.2009</pub-id><pub-id pub-id-type="pmid">19846703</pub-id></citation></ref>
<ref id="B82">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Vinyals</surname> <given-names>O.</given-names></name> <name><surname>Bengio</surname> <given-names>S.</given-names></name> <name><surname>Kudlur</surname> <given-names>M.</given-names></name></person-group> (<year>2015</year>). <article-title>Order matters: sequence to sequence for sets</article-title>. <source>arXiv[Preprint].</source> arXiv:1511.06391. <pub-id pub-id-type="doi">10.48550/arXiv.1511.06391</pub-id></citation>
</ref>
<ref id="B83">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vogt</surname> <given-names>B. A.</given-names></name> <name><surname>Laureys</surname> <given-names>S.</given-names></name></person-group> (<year>2005</year>). <article-title>Posterior cingulate, precuneal and retrosplenial cortices: cytology and components of the neural network correlates of consciousness</article-title>. <source>Prog. Brain Res</source>. <volume>150</volume>, <fpage>205</fpage>&#x02013;<lpage>217</lpage>. <pub-id pub-id-type="doi">10.1016/S0079-6123(05)50015-3</pub-id><pub-id pub-id-type="pmid">16186025</pub-id></citation></ref>
<ref id="B84">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>J.</given-names></name> <name><surname>Ma</surname> <given-names>A.</given-names></name> <name><surname>Chang</surname> <given-names>Y.</given-names></name> <name><surname>Gong</surname> <given-names>J.</given-names></name> <name><surname>Jiang</surname> <given-names>Y.</given-names></name> <name><surname>Qi</surname> <given-names>R.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>scGNN is a novel graph neural network framework for single-cell RNA-Seq analyses</article-title>. <source>Nat. Commun</source>. <volume>12</volume>, <fpage>1</fpage>&#x02013;<lpage>11</lpage>. <pub-id pub-id-type="doi">10.1038/s41467-021-22197-x</pub-id><pub-id pub-id-type="pmid">35508488</pub-id></citation></ref>
<ref id="B85">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>L.</given-names></name> <name><surname>Durante</surname> <given-names>D.</given-names></name> <name><surname>Jung</surname> <given-names>R. E.</given-names></name> <name><surname>Dunson</surname> <given-names>D. B.</given-names></name></person-group> (<year>2017</year>). <article-title>Bayesian network-response regression</article-title>. <source>Bioinformatics</source> <volume>33</volume>, <fpage>1859</fpage>&#x02013;<lpage>1866</lpage>. <pub-id pub-id-type="doi">10.1093/bioinformatics/btx050</pub-id><pub-id pub-id-type="pmid">28165112</pub-id></citation></ref>
<ref id="B86">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Warren</surname> <given-names>D. E.</given-names></name> <name><surname>Denburg</surname> <given-names>N. L.</given-names></name> <name><surname>Power</surname> <given-names>J. D.</given-names></name> <name><surname>Bruss</surname> <given-names>J.</given-names></name> <name><surname>Waldron</surname> <given-names>E. J.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2017</year>). <article-title>Brain network theory can predict whether neuropsychological outcomes will differ from clinical expectations</article-title>. <source>Arch. Clin. Neuropsychol</source>. <volume>32</volume>, <fpage>40</fpage>&#x02013;<lpage>52</lpage>. <pub-id pub-id-type="doi">10.1093/arclin/acw091</pub-id><pub-id pub-id-type="pmid">27789443</pub-id></citation></ref>
<ref id="B87">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Wee</surname> <given-names>C.-Y.</given-names></name> <name><surname>Liu</surname> <given-names>C.</given-names></name> <name><surname>Lee</surname> <given-names>A.</given-names></name> <name><surname>Poh</surname> <given-names>J. S.</given-names></name> <name><surname>Ji</surname> <given-names>H.</given-names></name> <name><surname>Qiu</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2019</year>). <article-title>Cortical graph neural network for ad and mci diagnosis and transfer learning across populations</article-title>. <source>Neuroimage Clin</source>. <volume>23</volume>, <fpage>101929</fpage>. <pub-id pub-id-type="doi">10.1016/j.nicl.2019.101929</pub-id><pub-id pub-id-type="pmid">31491832</pub-id></citation></ref>
<ref id="B88">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Whitfield-Gabrieli</surname> <given-names>S.</given-names></name> <name><surname>Nieto-Castanon</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>Conn: a functional connectivity toolbox for correlated and anticorrelated brain networks</article-title>. <source>Brain Connect</source>. <volume>2</volume>, <fpage>125</fpage>&#x02013;<lpage>141</lpage>. <pub-id pub-id-type="doi">10.1089/brain.2012.0073</pub-id><pub-id pub-id-type="pmid">22642651</pub-id></citation></ref>
<ref id="B89">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Xia</surname> <given-names>C. H.</given-names></name> <name><surname>Ma</surname> <given-names>Z.</given-names></name> <name><surname>Cui</surname> <given-names>Z.</given-names></name> <name><surname>Bzdok</surname> <given-names>D.</given-names></name> <name><surname>Thirion</surname> <given-names>B.</given-names></name> <name><surname>Bassett</surname> <given-names>D. S.</given-names></name> <etal/></person-group>. (<year>2020</year>). <source>Multi-Scale Network Regression for Brain-Phenotype Associations</source>. Technical report, Wiley Online Library. <pub-id pub-id-type="doi">10.1002/hbm.24982</pub-id><pub-id pub-id-type="pmid">32216125</pub-id></citation></ref>
<ref id="B90">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xuan</surname> <given-names>P.</given-names></name> <name><surname>Pan</surname> <given-names>S.</given-names></name> <name><surname>Zhang</surname> <given-names>T.</given-names></name> <name><surname>Liu</surname> <given-names>Y.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name></person-group> (<year>2019</year>). <article-title>Graph convolutional network and convolutional neural network based method for predicting lncrna-disease associations</article-title>. <source>Cells</source> <volume>8</volume>, <fpage>1012</fpage>. <pub-id pub-id-type="doi">10.3390/cells8091012</pub-id><pub-id pub-id-type="pmid">31480350</pub-id></citation></ref>
<ref id="B91">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ying</surname> <given-names>Z.</given-names></name> <name><surname>You</surname> <given-names>J.</given-names></name> <name><surname>Morris</surname> <given-names>C.</given-names></name> <name><surname>Ren</surname> <given-names>X.</given-names></name> <name><surname>Hamilton</surname> <given-names>W.</given-names></name> <name><surname>Leskovec</surname> <given-names>J.</given-names></name></person-group> (<year>2018</year>). <article-title>&#x0201C;Hierarchical graph representation learning with differentiable pooling,&#x0201D;</article-title> in <source>Advances in Neural Information Processing Systems</source>, eds S. Bengio, H. Wallach, H. Larochelle, K. Grauman, N. Cesa-Bianchi, and R. Garnett (Montreal, QC), <fpage>4805</fpage>&#x02013;<lpage>4815</lpage>.<pub-id pub-id-type="pmid">34577277</pub-id></citation></ref>
<ref id="B92">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Yuan</surname> <given-names>H.</given-names></name> <name><surname>Ji</surname> <given-names>S.</given-names></name></person-group> (<year>2020</year>). <article-title>&#x0201C;Structpool: structured graph pooling via conditional random fields,&#x0201D;</article-title> in <source>Proceedings of the 8th International Conference on Learning Representations</source> (<publisher-loc>Addis Ababa</publisher-loc>).</citation>
</ref>
<ref id="B93">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhan</surname> <given-names>L.</given-names></name> <name><surname>Zhou</surname> <given-names>J.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Jin</surname> <given-names>Y.</given-names></name> <name><surname>Jahanshad</surname> <given-names>N.</given-names></name> <name><surname>Prasad</surname> <given-names>G.</given-names></name> <etal/></person-group>. (<year>2015</year>). <article-title>Comparison of nine tractography algorithms for detecting abnormal structural brain networks in Alzheimer&#x00027;s disease</article-title>. <source>Front. Aging Neurosci</source>. <volume>7</volume>, <fpage>48</fpage>. <pub-id pub-id-type="doi">10.3389/fnagi.2015.00048</pub-id><pub-id pub-id-type="pmid">25926791</pub-id></citation></ref>
<ref id="B94">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>M.</given-names></name> <name><surname>Cui</surname> <given-names>Z.</given-names></name> <name><surname>Neumann</surname> <given-names>M.</given-names></name> <name><surname>Chen</surname> <given-names>Y.</given-names></name></person-group> (<year>2018</year>). <article-title>&#x0201C;An end-to-end deep learning architecture for graph classification,&#x0201D;</article-title> in <source>Thirty-Second AAAI Conference on Artificial Intelligence</source> (<publisher-loc>New Orleans, LA</publisher-loc>: <publisher-name>AAAI</publisher-name>). <pub-id pub-id-type="doi">10.1609/aaai.v32i1.11782</pub-id></citation>
</ref>
<ref id="B95">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>W.</given-names></name> <name><surname>Zhan</surname> <given-names>L.</given-names></name> <name><surname>Thompson</surname> <given-names>P.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name></person-group> (<year>2020</year>). <article-title>&#x0201C;Deep representation learning for multimodal brain networks,&#x0201D;</article-title> in <source>International Conference on Medical Image Computing and Computer-Assisted Intervention</source> (<publisher-loc>Lima</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>613</fpage>&#x02013;<lpage>624</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-59728-3_60</pub-id><pub-id pub-id-type="pmid">34296225</pub-id></citation></ref>
<ref id="B96">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>Y.</given-names></name> <name><surname>Huang</surname> <given-names>H.</given-names></name></person-group> (<year>2019</year>). <article-title>&#x0201C;New graph-blind convolutional network for brain connectome data analysis,&#x0201D;</article-title> in <source>International Conference on Information Processing in Medical Imaging</source> (<publisher-loc>Hong Kong</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>669</fpage>&#x02013;<lpage>681</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-20351-1_52</pub-id></citation>
</ref>
<ref id="B97">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>Y.</given-names></name> <name><surname>Zhan</surname> <given-names>L.</given-names></name> <name><surname>Cai</surname> <given-names>W.</given-names></name> <name><surname>Thompson</surname> <given-names>P.</given-names></name> <name><surname>Huang</surname> <given-names>H.</given-names></name></person-group> (<year>2019a</year>). <article-title>&#x0201C;Integrating heterogeneous brain networks for predicting brain disease conditions,&#x0201D;</article-title> in <source>International Conference on Medical Image Computing and Computer-Assisted Intervention</source> (<publisher-loc>Shenzhen</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>214</fpage>&#x02013;<lpage>222</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-32251-9_24</pub-id></citation>
</ref>
<ref id="B98">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>Z.</given-names></name> <name><surname>Allen</surname> <given-names>G. I.</given-names></name> <name><surname>Zhu</surname> <given-names>H.</given-names></name> <name><surname>Dunson</surname> <given-names>D.</given-names></name></person-group> (<year>2019b</year>). <article-title>Tensor network factorizations: Relationships between brain structural connectomes and traits</article-title>. <source>Neuroimage</source> <volume>197</volume>, <fpage>330</fpage>&#x02013;<lpage>343</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2019.04.027</pub-id><pub-id pub-id-type="pmid">31029870</pub-id></citation></ref>
<ref id="B99">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>Z.</given-names></name> <name><surname>Bu</surname> <given-names>J.</given-names></name> <name><surname>Ester</surname> <given-names>M.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Yao</surname> <given-names>C.</given-names></name> <name><surname>Yu</surname> <given-names>Z.</given-names></name> <etal/></person-group>. (<year>2019c</year>). <article-title>Hierarchical graph pooling with structure learning</article-title>. <source>arXiv[Preprint].</source> arXiv:1911.05954. <pub-id pub-id-type="doi">10.48550/arXiv.1911.05954</pub-id></citation>
</ref>
</ref-list>
<fn-group>
<fn id="fn0001"><p><sup>1</sup><ext-link ext-link-type="uri" xlink:href="https://wiki.humanconnectome.org">https://wiki.humanconnectome.org</ext-link></p></fn>
</fn-group>
</back>
</article>