<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Built Environ.</journal-id>
<journal-title>Frontiers in Built Environment</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Built Environ.</abbrev-journal-title>
<issn pub-type="epub">2297-3362</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1402619</article-id>
<article-id pub-id-type="doi">10.3389/fbuil.2024.1402619</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Built Environment</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Predictive modeling of rocking-induced settlement in shallow foundations using ensemble machine learning and neural networks</article-title>
<alt-title alt-title-type="left-running-head">Gajan</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fbuil.2024.1402619">10.3389/fbuil.2024.1402619</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Gajan</surname>
<given-names>Sivapalan</given-names>
</name>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2563132/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff>
<institution>College of Engineering</institution>, <institution>SUNY Polytechnic Institute</institution>, <addr-line>Utica</addr-line>, <addr-line>NY</addr-line>, <country>United States</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2369589/overview">Chenying Liu</ext-link>, Georgia Institute of Technology, United States</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2674774/overview">Ruijia Wang</ext-link>, Georgia Institute of Technology, United States</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/288137/overview">Panagiotis G. Asteris</ext-link>, School of Pedagogical and Technological Education, Greece</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Sivapalan Gajan, <email>gajans@sunypoly.edu</email>
</corresp>
</author-notes>
<pub-date pub-type="epub">
<day>05</day>
<month>06</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>10</volume>
<elocation-id>1402619</elocation-id>
<history>
<date date-type="received">
<day>17</day>
<month>03</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>17</day>
<month>05</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2024 Gajan.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Gajan</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>The objective of this study is to develop predictive models for rocking-induced permanent settlement in shallow foundations during earthquake loading using stacking, bagging and boosting ensemble machine learning (ML) and artificial neural network (ANN) models.</p>
</sec>
<sec>
<title>Methods</title>
<p>The ML models are developed using a supervised learning technique and results obtained from rocking foundation experiments conducted on shaking tables and centrifuges. The overall performance of the ML models is evaluated using k-fold cross validation tests and mean absolute percentage error (MAPE) and mean absolute error (MAE) in their predictions.</p>
</sec>
<sec>
<title>Results</title>
<p>The performances of all six nonlinear ML models developed in this study are relatively consistent in terms of prediction accuracy with their average MAPE varying between 0.64 and 0.86 in final k-fold cross validation tests.</p>
</sec>
<sec>
<title>Discussion</title>
<p>The overall average MAE in predictions of all nonlinear ML models is smaller than 0.006, implying that the ML models developed in this study have the potential to predict permanent settlement of rocking foundations with reasonable accuracy in practical applications.</p>
</sec>
</abstract>
<kwd-group>
<kwd>earthquake engineering</kwd>
<kwd>shallow foundation</kwd>
<kwd>soil-structure interaction</kwd>
<kwd>machine learning</kwd>
<kwd>artificial neural network</kwd>
</kwd-group>
<contract-sponsor id="cn001">National Science Foundation<named-content content-type="fundref-id">10.13039/100000001</named-content>
</contract-sponsor>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Earthquake Engineering</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Structural fuse mechanisms, active and passive energy dissipation devices, and base isolation techniques have generally been used to improve the seismic performance of important structures (e.g., <xref ref-type="bibr" rid="B48">Soong and Spencer, 2002</xref>; <xref ref-type="bibr" rid="B49">Symans et al., 2008</xref>). Geotechnical seismic isolation, using various techniques, is a relatively new area of research that has been studied to some extent in the recent past. As such, the findings of recent experimental research on rocking shallow foundations reveal that rocking mechanism dissipates seismic energy in soil, reduces seismic demands imposed on structures, and can be used as geotechnical seismic isolation mechanism to improve the overall seismic performance of structures they support (e.g., <xref ref-type="bibr" rid="B25">Gajan et al., 2005</xref>; <xref ref-type="bibr" rid="B43">Paolucci et al., 2008</xref>; <xref ref-type="bibr" rid="B4">Anastasopoulos et al., 2010</xref>; <xref ref-type="bibr" rid="B41">Loli et al., 2014</xref>; <xref ref-type="bibr" rid="B39">Ko et al., 2019</xref>; <xref ref-type="bibr" rid="B34">Hakhamaneshi et al., 2020</xref>; <xref ref-type="bibr" rid="B7">Arabpanahan et al., 2023</xref>; <xref ref-type="bibr" rid="B36">Irani et al., 2023</xref>). In addition, it has been shown that appropriately-designed rocking shallow foundations can be as effective as structural energy dissipating mechanisms in terms of reducing the seismic demands experienced by key structural members (<xref ref-type="bibr" rid="B27">Gajan and Saravanathiiban, 2011</xref>; <xref ref-type="bibr" rid="B22">Gajan and Godagama, 2019</xref>). 
However, the material nonlinearities (yielding of soil and resulting plastic deformations), the geometrical nonlinearities associated with the soil-foundation system (partial separation of footing from supporting soil), and the uncertainties in soil properties and earthquake loading parameters pose significant challenges to the accurate prediction of permanent deformations in foundation during rocking.</p>
<p>A recent article reviews the commonly used numerical methods for modeling dynamic soil-structure interaction in shallow foundations during earthquake loading including spring-based Winkler foundation models, macro-element models for soil-foundation system, and continuum-based models (<xref ref-type="bibr" rid="B10">Bapir et al., 2023</xref>). Researchers in the past have developed constitutive models that relate the cyclic forces and displacements acting on the foundation during seismic loading and performed numerical simulations of rocking foundations incorporating nonlinear dynamic soil-foundation interaction (e.g., <xref ref-type="bibr" rid="B2">Allotey and Naggar, 2003</xref>; <xref ref-type="bibr" rid="B24">Gajan and Kutter, 2009</xref>; <xref ref-type="bibr" rid="B26">Gajan et al., 2010</xref>; <xref ref-type="bibr" rid="B11">Chatzigogos et al., 2011</xref>; <xref ref-type="bibr" rid="B18">Figini et al., 2012</xref>; <xref ref-type="bibr" rid="B44">Pelekis et al., 2021</xref>). Though the mechanics-based constitutive models and numerical simulation approaches for rocking foundations have a sound theoretical basis, they include assumptions and simplifications in their formulations. Machine learning (ML) models, on the other hand, have the ability to generalize experimental behavior when they are trained and tested on data and results that cover a wide range of experiments conducted independently by different researchers and using different types of equipment. Although the ML models can have their drawbacks (for example, they may not be able to capture every physical mechanism that governs the problem), they are capable of capturing the hidden complex relationships in data and have the potential to be used in addition to mechanics-based numerical models in practical applications.</p>
<p>As the number of widely available experimental databases increases, the use of ML techniques to model geotechnical engineering problems increases exponentially, especially in the last 30&#xa0;years (<xref ref-type="bibr" rid="B17">Ebid, 2021</xref>). For example, support vector machines, decision trees, and neural networks have been used in geotechnical engineering applications such as compaction characteristics of soils, mechanical properties and strength of soils, foundation engineering, soil slope stability, and geotechnical earthquake engineering (e.g., <xref ref-type="bibr" rid="B32">Goh and Goh, 2007</xref>; <xref ref-type="bibr" rid="B42">Mozumder and Laskar, 2015</xref>; <xref ref-type="bibr" rid="B45">Pham et al., 2017</xref>; <xref ref-type="bibr" rid="B37">Jeremiah et al., 2021</xref>; <xref ref-type="bibr" rid="B3">Amjad et al., 2022</xref>). Artificial neural networks, gene expression programming, and neuro-swarm system algorithms have been used successfully for the prediction of settlement of shallow and deep foundations (<xref ref-type="bibr" rid="B8">Armaghani et al., 2018</xref>; <xref ref-type="bibr" rid="B8">Armaghani et al., 2020</xref>; <xref ref-type="bibr" rid="B15">Diaz et al., 2018</xref>). Recently, ML-based predictive models have been developed for normalized seismic energy dissipation, peak rotation, and acceleration amplification ratio of rocking foundations during earthquake loading (<xref ref-type="bibr" rid="B19">Gajan, 2021</xref>; <xref ref-type="bibr" rid="B20">Gajan, 2022</xref>; <xref ref-type="bibr" rid="B21">Gajan, 2023</xref>). A recent review article summarizes the recent advances in application of machine learning and deep learning tools to predict the properties of cementitious composites (concrete and fiber-reinforced concrete) at elevated temperatures (<xref ref-type="bibr" rid="B1">Alkayem et al., 2024</xref>). 
In addition to ML models alone, theory-guided ML is also slowly becoming popular in predictive modeling in engineering (<xref ref-type="bibr" rid="B38">Karpatne et al., 2017</xref>) and in geotechnical engineering in particular (<xref ref-type="bibr" rid="B51">Xiong et al., 2023</xref>).</p>
</sec>
<sec id="s2">
<title>2 Research significance and objective</title>
<p>Empirical relationships have been proposed for the estimation of permanent settlement of rocking shallow foundations using either the static vertical factor of safety (FS<sub>v</sub>) or critical contact area ratio (A/A<sub>c</sub>) of foundation and the cumulative rotation experienced by the foundation during earthquake loading (<xref ref-type="bibr" rid="B14">Deng et al., 2012</xref>; <xref ref-type="bibr" rid="B35">Hamidpour et al., 2022</xref>). A/A<sub>c</sub> is conceptually a factor of safety for rocking foundations taking into account the change of contact area of the footing with the soil during rocking (<xref ref-type="bibr" rid="B23">Gajan and Kutter, 2008</xref>). The cumulative rotation (&#x3b8;<sub>cum</sub>) of the foundation is defined based on the instantaneous peak rotations experienced by the foundation (local maximums) that exceed a threshold value (<xref ref-type="bibr" rid="B14">Deng et al., 2012</xref>). The threshold value for this peak rotation is defined arbitrarily as 0.001 rad, assuming that rotations smaller than 0.001&#xa0;rad do not cause permanent settlement. One difficulty or drawback of the cumulative rotation approach for the estimation of permanent settlement is that the &#x3b8;<sub>cum</sub> of the foundation can only be known after the earthquake shaking is over (i.e., &#x3b8;<sub>cum</sub> itself is a performance parameter of rocking foundation, and cannot be known before the earthquake to predict the rocking induced settlement of foundation).</p>
<p>The objective of this study is to develop predictive models for rocking-induced permanent settlement in shallow foundations during earthquake loading using stacking, bagging and boosting ensemble ML models and artificial neural network (ANN) model. Support vector regression (SVR), k-nearest neighbors regression (KNN), stacked generalization (Stacking), random forest regression (RFR), adaptive boosting regression (ABR), and fully-connected artificial neural network regression (ANN) algorithms have been utilized in this study. The ML models are trained and tested using results obtained from rocking foundation experiments conducted on shaking tables and centrifuges. Critical contact area ratio of foundation, slenderness ratio of structure, rocking coefficient of rocking soil-foundation-structure system, peak ground acceleration of earthquake, Arias intensity of earthquake ground motion, and a binary feature for type of soil have been used as input features to ML models. The significance of the study presented in this paper is that this is the first time data-driven predictive models are developed for rocking-induced settlement of shallow foundations using ML and deep learning algorithms. In addition, the input features used are in the form of normalized, non-dimensional soil-foundation system parameters and earthquake ground motion parameters that are readily available for design of structures in majority of the seismic zones.</p>
</sec>
<sec id="s3">
<title>3 Rocking-induced settlement in shallow foundations</title>
<sec id="s3-1">
<title>3.1 Settlement-rotation relationship</title>
<p>
<xref ref-type="fig" rid="F1">Figure 1A</xref> illustrates the schematic of a rocking structure-foundation system and the forces and displacements acting on the foundation during earthquake loading. For rocking on a 2-D plane, these forces and displacements include vertical load (V), settlement (s), shear force (H), sliding (u), moment (M), and rotation (&#x3b8;). <xref ref-type="fig" rid="F1">Figures 1B, C</xref> present experimental results for cyclic settlement <italic>versus</italic> rotation response at the base center point of foundation supported by sandy soils. Note that the settlement is normalized by the width of the footing (NS &#x3d; s/B). <xref ref-type="fig" rid="F1">Figure 1B</xref> presents the results obtained from a centrifuge experiment (FS<sub>v</sub> &#x3d; 4 and a<sub>max</sub> &#x3d; 0.55&#xa0;g) (<xref ref-type="bibr" rid="B23">Gajan and Kutter, 2008</xref>), while <xref ref-type="fig" rid="F1">Figure 1C</xref> presents the results obtained from a shaking table experiment (FS<sub>v</sub> &#x3d; 24 and a<sub>max</sub> &#x3d; 0.36&#xa0;g) (<xref ref-type="bibr" rid="B6">Antonellis et al., 2015</xref>), where FS<sub>v</sub> is the static factor of safety for bearing capacity failure and a<sub>max</sub> is the peak ground acceleration of the earthquake.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>
<bold>(A)</bold> Illustration of major forces and displacements acting on a rocking structure, and experimental results of settlement <italic>versus</italic> rotation response at the base of rocking foundations: <bold>(B)</bold> FS<sub>v</sub> &#x3d; 4 and a<sub>max</sub> &#x3d; 0.55&#xa0;g and <bold>(C)</bold> FS<sub>v</sub> &#x3d; 24 and a<sub>max</sub> &#x3d; 0.36&#xa0;g.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g001.tif"/>
</fig>
<p>As shown in <xref ref-type="fig" rid="F1">Figure 1B</xref>, the foundation keeps accumulating settlement as the seismic shaking progresses and as the footing rocks, with the permanent settlement being equal to about 1.6% of the width of the footing (NS &#x3d; 0.016) at the end of shaking. For the other test (FS<sub>v</sub> &#x3d; 24, <xref ref-type="fig" rid="F1">Figure 1C</xref>), as the footing rocks, a gap opens between the soil and the footing and it results in instantaneous uplift of the footing (the negative values on NS axes represent uplift of footing). Therefore, the settlement-rotation response shows smaller permanent settlement for higher FS<sub>v</sub> foundations (NS &#x3d; 0.00175). For relatively lower FS<sub>v</sub> foundations, the settlement-rotation response is dominated by yielding of soil (material nonlinearity) whereas for higher FS<sub>v</sub> foundations, it is dominated by uplift of footing (geometrical nonlinearity). The rocking-induced permanent settlement in shallow foundations depends primarily on FS<sub>v</sub> and the magnitude, number of cycles, and duration of earthquake loading.</p>
</sec>
<sec id="s3-2">
<title>3.2 Experimental results and key parameters</title>
<p>The experimental data and results utilized in this study are extracted from a rocking foundations database (<xref ref-type="bibr" rid="B29">Gavras et al., 2020</xref>). This database is freely accessible and available in Design-Safe-CI website (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.13019/3rqyd929">https://doi.org/10.13019/3rqyd929</ext-link>) (<xref ref-type="bibr" rid="B30">Gavras et al., 2023</xref>). This database has results obtained from centrifuge and shaking table experiments on rocking foundations conducted by several researchers (<xref ref-type="bibr" rid="B23">Gajan and Kutter, 2008</xref>; <xref ref-type="bibr" rid="B14">Deng et al., 2012</xref>; <xref ref-type="bibr" rid="B13">Deng and Kutter, 2012</xref>; <xref ref-type="bibr" rid="B16">Drosos et al., 2012</xref>; <xref ref-type="bibr" rid="B33">Hakhamaneshi et al., 2012</xref>; <xref ref-type="bibr" rid="B5">Anastasopoulos et al., 2013</xref>; <xref ref-type="bibr" rid="B6">Antonellis et al., 2015</xref>; <xref ref-type="bibr" rid="B50">Tsatsis and Anastasopoulos, 2015</xref>). A summary of results of these experiments, in terms of rocking foundations performance parameters, is also available in the literature (<xref ref-type="bibr" rid="B28">Gajan et al., 2021</xref>).</p>
<p>Rocking coefficient (C<sub>r</sub>) is essentially the normalized ultimate moment capacity of a rocking foundation and is given by (<xref ref-type="bibr" rid="B14">Deng et al., 2012</xref>),<disp-formula id="equ1">
<mml:math id="m1">
<mml:mrow>
<mml:msub>
<mml:mi>C</mml:mi>
<mml:mi>r</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mi>B</mml:mi>
<mml:mrow>
<mml:mn>2</mml:mn>
<mml:mo>.</mml:mo>
<mml:mi>h</mml:mi>
</mml:mrow>
</mml:mfrac>
<mml:mo>.</mml:mo>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="&#x7c;">
<mml:mrow>
<mml:mn>1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:msub>
<mml:mi>A</mml:mi>
<mml:mi>c</mml:mi>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:mi>A</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>where h is the effective height of the structure (height of center of gravity of the structure from the base of the footing). Arias intensity (I<sub>a</sub>) of the ground motion is essentially the numerical integration of earthquake ground acceleration in time domain. The effects of number of cycles of earthquake loading, amplitude of cycles, frequency content and duration are combined in I<sub>a</sub>, and it is defined as (<xref ref-type="bibr" rid="B40">Kramer, 1996</xref>),<disp-formula id="equ2">
<mml:math id="m2">
<mml:mrow>
<mml:msub>
<mml:mi>I</mml:mi>
<mml:mi>a</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mi>&#x3c0;</mml:mi>
<mml:mrow>
<mml:mn>2</mml:mn>
<mml:mo>.</mml:mo>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:mfrac>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x222b;</mml:mo>
<mml:mn>0</mml:mn>
<mml:msub>
<mml:mi>t</mml:mi>
<mml:mrow>
<mml:mi>f</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:msub>
</mml:munderover>
</mml:mstyle>
<mml:mrow>
<mml:msup>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="&#x7c;">
<mml:mrow>
<mml:mi>a</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:mi>t</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
<mml:mi>d</mml:mi>
<mml:mi>t</mml:mi>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>where a(t) is horizontal ground acceleration as a function of time (t), g is the gravitational acceleration, and t<sub>fin</sub> is the duration of earthquake.</p>
<p>
<xref ref-type="fig" rid="F2">Figure 2</xref> presents the results for experimentally measured normalized permanent settlement (NS) of rocking foundations obtained from 140 individual experiments. The results presented in <xref ref-type="fig" rid="F2">Figure 2</xref> are grouped based on the rocking coefficient (C<sub>r</sub>) of foundation and type of soil. As both NS and I<sub>a</sub> depend on the amplitude, number of cycles and duration of earthquake loading, the NS results are plotted as a function of I<sub>a</sub> in <xref ref-type="fig" rid="F2">Figure 2</xref>. For a given C<sub>r</sub> range and soil type, NS seems to increase with I<sub>a</sub>; however, the variability in data indicates the presence of the effects of other variables. Another observation is that the NS in clayey soil foundations are smaller than in sandy soil foundations with the same C<sub>r</sub> range (C<sub>r</sub> <italic>&#x3e;</italic> 0.2). This is consistent with the findings of recently published results on rocking-induced settlement in shallow foundations supported by clayey soils during slow lateral cyclic loading (<xref ref-type="bibr" rid="B46">Sharma and Deng, 2019</xref>; <xref ref-type="bibr" rid="B47">Sharma and Deng, 2020</xref>). When the data presented in <xref ref-type="fig" rid="F2">Figure 2</xref> are divided into three groups (based on their C<sub>r</sub> values and soil type) and are fit using a statistics-based simple linear regression model, they yield coefficients of determination (<italic>R</italic>
<sup>2</sup>) values that are smaller than 0.35. This indicates that purely statistics-based models are not capable of capturing the permanent settlement of shallow foundations satisfactorily.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>Experimental results of normalized settlement of rocking foundations used in the development of machine learning models.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g002.tif"/>
</fig>
</sec>
<sec id="s3-3">
<title>3.3 Input features for machine learning models</title>
<p>In addition to the above-mentioned variables (C<sub>r</sub>, I<sub>a</sub> and type of soil), rocking induced settlement of foundations also depends on A/A<sub>c</sub>, h/B, and peak ground acceleration of the earthquake (a<sub>max</sub>). All of these six variables are chosen as the input features for the ML models developed in this study. The selection of input features is based on the experimentally observed relationships between NS and the input feature parameters found in previously published results (<xref ref-type="bibr" rid="B28">Gajan et al., 2021</xref>). The input feature selection is further justified in <xref ref-type="sec" rid="s5-2">Section 5.2</xref>: Sensitivity of ML models to input features. <xref ref-type="fig" rid="F3">Figure 3</xref> plots the statistical distributions of five input features showing the variation (numerical range) of each of the input feature extracted from the experimental database. The box plots present the mean and median along with the 10th, 25th, 75th and 90th percentile values of each of the five input features used in the development of ML models. <xref ref-type="table" rid="T1">Table 1</xref> summarizes the range of values, mean and coefficient of variation (COV) of all six input features and prediction parameter (NS). The type of soil is represented by a binary variable: 0 for sandy soil foundations and one for clayey soil foundations. As the variation of I<sub>a</sub> and NS are relatively high (I<sub>a</sub> varies from 0.03&#xa0;m/s to 26.4&#xa0;m/s, while the NS values are in the range of 10<sup>&#x2212;5</sup> to 10<sup>&#x2212;1</sup>), these two parameters are transformed to log-scale (base 10). In addition, all the input feature values are normalized in such a way that the values of each input feature vary between 0.0 and 1.0.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>Statistical distributions (variations of experimental data) of five input feature parameters used in the development of machine learning models.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g003.tif"/>
</fig>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Statistical distributions of input features and prediction parameter (normalized settlement, NS) used in this study.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Parameter</th>
<th align="center">Range</th>
<th align="center">Mean</th>
<th align="center">Coeff. of variation</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">A/A<sub>c</sub>
</td>
<td align="center">1.9&#x2013;17.1</td>
<td align="center">8.2</td>
<td align="center">0.52</td>
</tr>
<tr>
<td align="center">h/B</td>
<td align="center">1.2&#x2013;2.8</td>
<td align="center">1.9</td>
<td align="center">0.28</td>
</tr>
<tr>
<td align="center">C<sub>r</sub>
</td>
<td align="center">0.08&#x2013;0.36</td>
<td align="center">0.24</td>
<td align="center">0.33</td>
</tr>
<tr>
<td align="center">a<sub>max</sub> (g)</td>
<td align="center">0.04&#x2013;1.28</td>
<td align="center">0.43</td>
<td align="center">0.60</td>
</tr>
<tr>
<td align="center">I<sub>a</sub> (m/s)</td>
<td align="center">0.03&#x2013;26.4</td>
<td align="center">2.31</td>
<td align="center">1.90</td>
</tr>
<tr>
<td align="center">Type</td>
<td align="center">0 or 1</td>
<td align="center">n/a</td>
<td align="center">n/a</td>
</tr>
<tr>
<td align="center">NS [ &#x3d; s/B]</td>
<td align="center">1.17 &#xd7; 10<sup>&#x2212;5</sup>&#x2013;1.31 &#xd7; 10<sup>&#x2212;1</sup>
</td>
<td align="center">0.011</td>
<td align="center">1.57</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec sec-type="materials|methods" id="s4">
<title>4 Materials and methods</title>
<sec id="s4-1">
<title>4.1 Base machine learning algorithms</title>
<p>Three base ML algorithms are considered in this study: k-nearest neighbors regression (KNN), support vector regression (SVR), and decision-tree regression (DTR). The KNN algorithm works by learning to predict the output based on the test data point&#x2019;s nearest neighbors in training dataset (using the input feature values as distance measures), their output values, and their distance from the test data point. The number of neighbors to consider and the method of calculating the distance between two data points in a multi-dimensional input feature space are hyperparameters of the KNN model. The SVR algorithm makes the predictions by learning a hyperplane with a margin using training dataset in multi-dimensional space. The hyperparameters of SVR model include the margin of the hyperplane and a penalty parameter called C that determines the magnitude of tolerance used to adjust the margin (to accommodate datasets that have outliers). The DTR algorithm builds a tree-like data structure based on the input feature values and outputs of the training dataset and then makes predictions on test data using this tree. The maximum depth of the tree and the error criteria used to split the training data to build the tree (to create leaves) are the key hyperparameters of the DTR model.</p>
</sec>
<sec id="s4-2">
<title>4.2 Ensemble machine learning algorithms</title>
<p>Three ensemble ML algorithms are considered in this study: stacking, bagging, and boosting. Stacking model combines the predictions of multiple well-performing base ML models. In the process, the stacked model harnesses the best characteristics of the base models and makes predictions that are better than those of the base models. In this study, the predictions from KNN and SVR models are combined using linear regression as the meta model to create a stacking ensemble model. The training data for the stacked model consist of the outputs (predictions) of the base models and the actual, expected outputs. During testing, the stacked model combines the predictions of base models on test data using the trained linear regression meta model to make the prediction. The bagging and boosting ensemble techniques are implemented using random forest regression (RFR) and adaptive boosting regression (ABR), respectively. In both cases, multiple base DTR models are combined to create the ensemble model. The RFR model builds multiple individual trees (DTR models) of different depths and using random subsets of input features and then simply combines them together in such a way that the final prediction of the RFR model is the average value of each base DTR model in the ensemble. The ABR model also builds multiple DTR models, but sequentially, in such a way that the succeeding DTR models attempt to correct the error made by their preceding trees in the ensemble. The ABR model uses two sets of weights (data instance weights and predictor weights) and the final prediction of ABR model on test data is a weighted average value of the predictions of each base DTR model in the ensemble. The major hyperparameters of the RFR model include the number of trees in the ensemble and the maximum input features to consider when building an individual tree. The major hyperparameters of the ABR model include the number of trees in the ensemble and the learning rate of the model.</p>
</sec>
<sec id="s4-3">
<title>4.3 Artificial neural network model</title>
<p>
<xref ref-type="fig" rid="F4">Figure 4</xref> schematically illustrates the architecture of the sequential, fully-connected, multi-layer perceptron artificial neural network (ANN) regression model considered in this study. While the number of neurons in the input layer (six, one for each input feature) and output layer (one for the prediction parameter, NS) are fixed, the number of hidden layers and the number of neurons in each hidden layer are varied systematically using hyperparameter tuning and grid search to obtain their optimum values for the problem considered. The commonly used stochastic gradient descent (SGD) algorithm is used with the feed-forward, back-propagation algorithm to train the ANN models. As shown in <xref ref-type="fig" rid="F4">Figure 4</xref>, the neurons in the input layer simply pass the input features to all the neurons in the first hidden layer. The outputs (y) of the neurons in the hidden layers are computed using the following relationship based on the inputs (X), network connection weights (W), bias parameters (b), and an activation function (g) (<xref ref-type="bibr" rid="B31">Geron, 2019</xref>; <xref ref-type="bibr" rid="B12">Deitel and Deitel, 2020</xref>).<disp-formula id="equ3">
<mml:math id="m3">
<mml:mrow>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:mi>g</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msubsup>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>j</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>k</mml:mi>
</mml:msubsup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="&#x7c;">
<mml:mrow>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>j</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>i</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>X</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>b</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
</disp-formula>
</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>Schematic of the artificial neural network architecture utilized in this study.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g004.tif"/>
</fig>
<p>The network is first trained using the training data and back-propagation algorithm and the optimum values for the network connection weights are found. When making a prediction on test data, the ANN model propagates the test data instance using forward-propagation and computes the output using the above equation with the optimum connection weights.</p>
</sec>
<sec id="s4-4">
<title>4.4 Flowchart of research methodology</title>
<p>
<xref ref-type="fig" rid="F5">Figure 5</xref> presents the flowchart of the research methodology of this study. The experimental data is first randomly split into training dataset and testing dataset using a 70%&#x2013;30% split. Results from a total of 140 experiments are considered in this study (<xref ref-type="fig" rid="F2">Figure 2</xref>) and this split yields 98 training data instances and 42 testing instances. First, the ML models are trained using the training dataset and the hyperparameters of the ML models are tuned using k-fold cross validation tests considering training dataset only. The results of training error and hyperparameter tuning presented in this paper are obtained from this phase of the research process. Second, the trained ML models with optimum values of hyperparameters are tested using the testing dataset and the results for testing error are obtained using this phase. Finally, to compare the overall performance of ML models, the ML models with the optimum hyperparameters are evaluated using k-fold cross validation tests considering entire dataset. For both k-fold cross validations, repeated 5-fold cross validation tests are carried out (with number of repeats being equal to 3). Mean absolute percentage error (MAPE) and mean absolute error (MAE) are used to evaluate the performance of ML models by comparing their predictions of NS with experimental results. MAE quantifies the error by averaging the absolute difference between predicted and actual (experimental) values for NS, while MAPE quantifies the average error by normalizing the absolute difference between predicted and actual values by the actual value of NS. It should be noted that a multivariate linear regression (MLR) ML model is also developed for the purpose of comparison of results and performance of all the ML models. 
All the ML models are implemented in Python programming platform using the functional classes available in Scikit-Learn (<ext-link ext-link-type="uri" xlink:href="https://scikit-learn.org/stable/">https://scikit-learn.org/stable/</ext-link>) and TensorFlow and Keras (<ext-link ext-link-type="uri" xlink:href="https://keras.io/">https://keras.io/</ext-link>) libraries (<xref ref-type="bibr" rid="B31">Geron, 2019</xref>; <xref ref-type="bibr" rid="B12">Deitel and Deitel, 2020</xref>).</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption>
<p>Flowchart of research methodology showing the sequence of key processes.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g005.tif"/>
</fig>
</sec>
</sec>
<sec sec-type="results|discussion" id="s5">
<title>5 Results and discussion</title>
<sec id="s5-1">
<title>5.1 Initial evaluation of base and ensemble machine learning models</title>
<p>
<xref ref-type="fig" rid="F6">Figure 6</xref> presents the testing results of four base ML models: MLR, KNN, SVR and DTR. For all four models, the predicted results of NS are plotted on <italic>y</italic>-axes against the experimental results on <italic>x</italic>-axes along with 1:1 comparison lines. It should be noted that the hyperparameters of all the models are kept at their optimum values for these predictions (please see <xref ref-type="sec" rid="s5-3">Section 5.3</xref>). The testing MAPE values, calculated using 42 testing data results, are also included in <xref ref-type="fig" rid="F6">Figure 6</xref> for all four models. As seen in the figure, the KNN model (MAPE &#x3d; 0.48) performs better than the other models in terms of accuracy of predictions and it is followed by SVR model (MAPE &#x3d; 0.51). The base DTR model (a single decision tree) shows poor performance (MAPE &#x3d; 0.77) during testing phase, even worse than the baseline MLR model (MAPE &#x3d; 0.70). <xref ref-type="fig" rid="F7">Figure 7</xref> presents the testing results of Stacking, RFR and ABR ensemble ML models. KNN and SVR models are combined using linear regression meta-model to build the Stacking model. Other combinations of stacking were also tested; however, their performance did not improve. For RFR and ABR models, 100 base DTR models are combined using bagging and boosting techniques, respectively. The prediction accuracy of RFR and ABR ensemble models on test data is improved significantly (MAPE &#x3d; 0.45 and 0.53) when compared to a single DTR model (about 30%&#x2013;40% improvement in prediction accuracy). From the results presented in <xref ref-type="fig" rid="F7">Figure 7</xref>, it appears that the Stacking model is not effective (MAPE of Stack model is 0.5 whereas the MAPE of KNN alone is 0.48). 
However, in final k-fold cross validation tests of models (presented in <xref ref-type="sec" rid="s5-7">Section 5.7</xref>), the effectiveness of Stacking model becomes apparent.</p>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption>
<p>Machine learning model predictions of normalized settlement (NS) during initial testing of four base-ML models: <bold>(A)</bold> MLR, <bold>(B)</bold> KNN, <bold>(C)</bold> SVR and <bold>(D)</bold> DTR.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g006.tif"/>
</fig>
<fig id="F7" position="float">
<label>FIGURE 7</label>
<caption>
<p>Machine learning model predictions of normalized settlement (NS) during initial testing of three ensemble-ML models: <bold>(A)</bold> Stack, <bold>(B)</bold> RFR and <bold>(C)</bold> ABR.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g007.tif"/>
</fig>
<p>For the purpose of comparison, if a model always predicted the mean NS value (zero rule algorithm), it would yield an MAPE of 32.5 when tested on entire NS dataset. If a statistics-based simple linear regression model were run through the entire NS dataset, in log(NS)&#x2013;log(I<sub>a</sub>) space, it would yield an MAPE of 8.5 when tested on entire NS dataset. It should be noted that all the MAPE and MAE values are calculated based on the actual values of NS (not in log scale). These comparisons show that (i) there is significant scatter and randomness in NS data and (ii) when compared to the above-mentioned simple models, the ML models presented in this paper perform better by an order of magnitude.</p>
</sec>
<sec id="s5-2">
<title>5.2 Sensitivity of machine learning models to input features</title>
<p>The RFR model&#x2019;s &#x201c;feature importances&#x201d; function in Scikit-Learn quantifies the significance of each input feature based on how an input feature reduces uncertainty in data (as the nodes split the training dataset into smaller subsets while building the trees). The feature importance scores are normalized in such a way that the summation of feature importance scores of all input features is equal to 1.0. To investigate the effect of input features chosen in this study, twenty different RFR models are built by randomly selecting twenty different training datasets and the feature importance values are computed for each RFR model. The mean and standard deviation of the normalized feature importance scores are plotted in <xref ref-type="fig" rid="F8">Figure 8</xref> for each input feature. The rocking coefficient (C<sub>r</sub>) has the greatest effect in reducing the uncertainty in data with a feature importance score of about 28%, and it is followed by slenderness ratio (h/B) and critical contact area ratio (A/A<sub>c</sub>). This indicates that the geometry of the foundation and structure and bearing capacity of soil contribute more to the prediction of rocking-induced permanent settlement than the properties of earthquake ground motion. At the same time, the normalized feature importance scores of ground motion intensity parameters are around 10% each, indicating that they cannot be considered as redundant input features. The standard deviations of all the feature importance scores are less than 2% across 20 different random selections of training datasets, indicating the consistency of the RFR models built and the consistency of influence of each individual input feature on the prediction of NS.</p>
<fig id="F8" position="float">
<label>FIGURE 8</label>
<caption>
<p>Significance of input features in terms of normalized feature importance scores in the construction of RFR models.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g008.tif"/>
</fig>
</sec>
<sec id="s5-3">
<title>5.3 Sensitivity of machine learning models to major hyperparameters</title>
<p>The k-fold cross validation technique is used to tune the hyperparameters of ML models. Instead of relying on just one value for MAPE, the k-fold cross validation technique uses multiple splits of data to obtain an average value for testing MAPE considering multiple different training datasets and testing datasets. It should be noted that only the initial training dataset is used for these k-fold cross validation tests to tune the hyperparameters (i.e., the multiple, reshuffled train-test split of data for this process considers only the training dataset as shown in the flowchart presented in <xref ref-type="fig" rid="F5">Figure 5</xref>). In this study, 5-fold cross validation tests are carried out with three random reshuffling of data (i.e., 15 values for testing MAPE).</p>
<p>
<xref ref-type="fig" rid="F9">Figure 9</xref> presents the variation of testing MAPE on <italic>y</italic>-axes and the values of hyperparameters on <italic>x</italic>-axes for different ML models, with every data point in the figures representing the average of 15 testing MAPE values. Based on the results presented in <xref ref-type="fig" rid="F9">Figures 9A&#x2013;C</xref>, the optimum value for k of KNN model, the optimum value for C of SVR model, and the optimum value for maximum depth of tree of DTR model are selected to be 3, 20 and 6, respectively. The aforementioned values are chosen for hyperparameters to minimize the testing MAPE values and to avoid overfitting and underfitting the training data. For example, any value smaller than the optimum value of k in KNN model, a value greater than the optimum value for C in SVR model, and a depth greater than the optimum value for maximum depth of DTR model would all overfit the training data. The opposite is true for underfitting the training data. For RFR and ABR models, the number of trees in the ensemble is varied while the maximum depth of the tree is fixed at 6. Based on the trend shown in <xref ref-type="fig" rid="F9">Figure 9D</xref>, the optimum value for number of trees in both RFR and ABR ensemble is selected to be 100. It should be noted that, for the problem considered, the ML model predictions are not as sensitive to the other hyperparameters and they are set at their default values (the margin in SVR model &#x3d; 0.1, maximum number of random features considered while building a tree in RFR model &#x3d; 2, and the learning rate of ABR model &#x3d; 0.1). Apart from hyperparameter tuning results, all other results of ML models presented in this paper are obtained using the optimum values of hyperparameters.</p>
<fig id="F9" position="float">
<label>FIGURE 9</label>
<caption>
<p>Hyperparameter tuning results: Average MAPE in k-fold cross validation tests on training dataset versus the hyperparameters of ML models: <bold>(A)</bold> KNN, <bold>(B)</bold> SVR, <bold>(C)</bold> DTR, and <bold>(D)</bold> RFR and ABR.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g009.tif"/>
</fig>
</sec>
<sec id="s5-4">
<title>5.4 Initial evaluation of neural network models</title>
<p>ANN models with different architecture are developed, trained and tested. While the main structure of the ANN models is kept the same (multi-layer perceptron, fully-connected, sequential ANN models), the number of hidden layers and the number of neurons in each hidden layer are varied. The training and testing results of one of the ANN models are presented in <xref ref-type="fig" rid="F10">Figures 10A, B</xref>, respectively. The same training dataset and testing dataset (same as the ones used for other ML models, described in <xref ref-type="sec" rid="s5-1">section 5.1</xref>) are used to obtain the results presented in <xref ref-type="fig" rid="F10">Figure 10</xref>. This particular ANN model has only one hidden layer with 20 neurons in the hidden layer (this is the optimum architecture obtained for the problem considered, as described in <xref ref-type="sec" rid="s5-5">Section 5.5</xref>). During training, the ANN model starts with random values for network connection weights and it adjusts the weights using stochastic gradient descent algorithm until the error reaches a minimum. These particular results are obtained from one such random initialization of network connection weights. The final k-fold cross validation results presented in <xref ref-type="sec" rid="s5-7">Section 5.7</xref> removes the effects of this randomness by repeating the process multiple times and evaluating the average performance of the model. The testing MAPE of the ANN model is 0.46, which places the ANN model above all other ML models developed in this study in terms of prediction accuracy. It is interesting to note that the training MAPE of the ANN model is greater than the testing MAPE, which is not common in supervised machine learning.</p>
<fig id="F10" position="float">
<label>FIGURE 10</label>
<caption>
<p>Comparisons of ANN model predictions with experimental values of normalized settlement (NS) during: <bold>(A)</bold> initial training phase and <bold>(B)</bold> initial testing phase.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g010.tif"/>
</fig>
</sec>
<sec id="s5-5">
<title>5.5 Hyperparameter tuning of neural network models</title>
<p>The hyperparameters of the ANN model are tuned using 5-fold cross validation tests on training dataset. The number of hidden layers (L), the number of neurons in each hidden layer (N), the learning rate, and the number of epochs (iterations) are varied using grid search technique to optimize the values of these hyperparameters for the problem considered. The results obtained for average MAPE in 5-fold cross validation tests with the variation of hyperparameters are presented in <xref ref-type="fig" rid="F11">Figure 11</xref> for selected cases. As can be seen from <xref ref-type="fig" rid="F11">Figure 11A</xref>, a shallow ANN model with only one hidden layer turns out to be the optimum for the problem considered. This is interesting, but not unusual. Research literature on neural networks suggests that a shallow network with only one hidden layer could, in theory, model even complicated, nonlinear data, provided that it has enough number of neurons in the hidden layer (<xref ref-type="bibr" rid="B31">Geron, 2019</xref>). <xref ref-type="fig" rid="F11">Figure 11B</xref> shows that the accuracy of ANN model increases as the number of neurons in the hidden layer increases (average MAPE decreases); however the improvement in accuracy is not significant when the number of neurons increases beyond 20. In order to keep the model as simple as possible (least complexity) without sacrificing the accuracy significantly, the optimum values for number of hidden layers and number of neurons in the hidden layer are selected to be 1 and 20, respectively. Similarly, based on the trends presented in <xref ref-type="fig" rid="F11">Figures 11C, D</xref>, the optimum values for the learning rate of the SGD algorithm and number of epochs are selected as 0.01 and 200, respectively.</p>
<fig id="F11" position="float">
<label>FIGURE 11</label>
<caption>
<p>Hyperparameter tuning results of ANN model: Average MAPE in k-fold cross validation tests on training dataset versus hyperparameters of ANN model: <bold>(A)</bold> number of hidden layers, <bold>(B)</bold> number of neurons in each hidden layer, <bold>(C)</bold> number of epochs and <bold>(D)</bold> learning rate.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g011.tif"/>
</fig>
</sec>
<sec id="s5-6">
<title>5.6 Effect of initial train-test split of data</title>
<p>
<xref ref-type="fig" rid="F12">Figure 12A</xref> presents a summary of training and testing MAPE of different ML models in the prediction of NS during initial evaluation of models. In <xref ref-type="fig" rid="F12">Figure 12A</xref>, training error represents the performance of ML models when they are tested using the training dataset that is used to train the models (to quantify how much the models have learned from the training process). The consistency between the performances of different ML models is apparent: except for the baseline MLR model, all five nonlinear models included in <xref ref-type="fig" rid="F12">Figure 12A</xref> have a testing MAPE that varies between 0.45 and 0.53. The training errors of these five models are smaller than the testing errors (this is expected especially when the data size is relatively small). Note that the training error for KNN model is not applicable, as KNN model stores the entire training dataset during training phase (the MAPE of distance-weighted KNN would be 0.0, if tested with the training data).</p>
<fig id="F12" position="float">
<label>FIGURE 12</label>
<caption>
<p>Mean absolute percentage error (MAPE) during training and testing of ML models: <bold>(A)</bold> initial random train-test split of dataset and <bold>(B)</bold> second random train-test split of dataset.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g012.tif"/>
</fig>
<p>To investigate the effect of initial training and testing split of dataset, a second train-test split of data is created using a different value for the random state variable in the function used to split the data in scikit-learn. <xref ref-type="fig" rid="F12">Figure 12B</xref> presents the training and testing errors of the ML models when they are trained and tested on the second random split of train-test data. The training errors for both splits of data are relatively comparable, however the testing errors on the second split of data of all ML models are noticeably greater than those on the first (initial) split of data. This indicates a bias in the initial split of data (especially testing dataset). To eliminate or reduce the bias resulting from a single train-test split of data, k-fold cross validation tests are carried out considering multiple random splits of the entire dataset. The results of these final k-fold cross validation tests are presented in the next section.</p>
</sec>
<sec id="s5-7">
<title>5.7 Overall comparison of model performances</title>
<p>In order to compare the overall performance of all ML models developed in this study, final k-fold cross validation tests are carried out considering the entire dataset (5 folds with 3 repeats). For this purpose, the hyperparameters of all ML models are set at their optimum values and the models are trained and tested using multiple splits of dataset (please see the flowchart presented in <xref ref-type="fig" rid="F5">Figure 5</xref>). <xref ref-type="fig" rid="F13">Figure 13</xref> presents the average testing MAPE of all ML models obtained during this k-fold cross validation tests along with the standard deviations of MAPE of each model as bar plots. As shown in <xref ref-type="fig" rid="F13">Figure 13</xref>, the average MAPE of all nonlinear ML models are smaller than the baseline, linear MLR model for the prediction of NS. Except for the SVR model, the average MAPE of all the nonlinear ML models are smaller than 0.8. The average MAPE values of five nonlinear models (all but SVR) varies between 0.64 and 0.79, indicating the consistency in the performance of the ML models developed, though the models have different inductive biases (the assumptions based on which they learn or their learning objectives). The stacking ensemble model, which has the best average accuracy in final k-fold cross validation tests, improves the accuracy of prediction by about 33% when compared to the baseline MLR model (MAPE of 0.64 <italic>versus</italic> 0.96). <xref ref-type="fig" rid="F14">Figure 14</xref> presents the results of testing MAE in the predictions of NS in final k-fold cross validation tests in the same format as in <xref ref-type="fig" rid="F13">Figure 13</xref>. <xref ref-type="fig" rid="F14">Figure 14</xref> indicates that the trend of MAE of different ML models follows a similar pattern as in <xref ref-type="fig" rid="F13">Figure 13</xref> for MAPE. 
The average MAE of all six nonlinear ML models vary between 0.005 and 0.006, once again indicating consistency among different ML models. This also implies that the ML models developed in this study have the potential to predict permanent settlement of rocking foundations with reasonable accuracy in practical applications.</p>
<fig id="F13" position="float">
<label>FIGURE 13</label>
<caption>
<p>Summary results for the average and standard deviation of MAPE of ML models in final k-fold cross validation tests.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g013.tif"/>
</fig>
<fig id="F14" position="float">
<label>FIGURE 14</label>
<caption>
<p>Summary results for the average and standard deviation of MAE of ML models in final k-fold cross validation tests.</p>
</caption>
<graphic xlink:href="fbuil-10-1402619-g014.tif"/>
</fig>
<p>
<xref ref-type="table" rid="T2">Table 2</xref> lists the values of average MAPE and MAE of all ML models during final k-fold cross validation tests. The MAPE of the statistics-based (non-ML) simple linear regression model for rocking-induced settlement is 8.5 (described in <xref ref-type="sec" rid="s5-1">Section 5.1</xref>). The ML models developed in this study to predict the permanent settlement of rocking foundations improve that accuracy by 89%&#x2013;92%. Also included in <xref ref-type="table" rid="T2">Table 2</xref> are average values of a_20 index and a_50 index of model predictions in final k-fold cross validation tests. The a_20 index is defined as the ratio of number of predictions that fall within &#xb1;20% of the actual experimental values divided by the total number of predictions (<xref ref-type="bibr" rid="B9">Asteris et al., 2021</xref>). The a_50 index is defined in a similar way to quantify the ratio of predictions that fall within &#xb1;50% of the actual experimental values. The a_20 index of all ML models developed in this study, except the Stacking model, varies between 0.20 and 0.23. The relatively small values of a_20 index reflect the difficulty in predicting the rocking induced settlement accurately. It is interesting to note that the a_20 index of the Stacking model is the smallest (0.179) although its average MAPE is the best among all ML models in final k-fold cross validation tests. This suggests that Stacking model reduces the error in outliers of model predictions while the accuracy of model predictions that are closer to the actual values are not particularly high. Similarly, the a_50 index of the SVR model is the greatest (0.671) of all ML models while its average MAPE and MAE indicate that it is the least effective of all nonlinear models in terms of overall average accuracy. 
This suggests that most of the SVR model predictions are close to the actual values, while it produces relatively more outliers in predictions, thus increasing the overall MAE and MAPE of predictions.</p>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Summary of average MAPE, MAE, a_20 index, and a_50 index of machine learning models in final k-fold cross validation tests of models.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Model</th>
<th align="center">Ave. MAPE</th>
<th align="center">Ave. MAE</th>
<th align="center">Ave. a_20 index</th>
<th align="center">Ave. a_50 index</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">MLR</td>
<td align="center">0.957</td>
<td align="center">0.0073</td>
<td align="center">0.205</td>
<td align="center">0.445</td>
</tr>
<tr>
<td align="center">SVR</td>
<td align="center">0.864</td>
<td align="center">0.0060</td>
<td align="center">0.226</td>
<td align="center">0.671</td>
</tr>
<tr>
<td align="center">KNN</td>
<td align="center">0.731</td>
<td align="center">0.0048</td>
<td align="center">0.217</td>
<td align="center">0.560</td>
</tr>
<tr>
<td align="center">Stack</td>
<td align="center">0.638</td>
<td align="center">0.0054</td>
<td align="center">0.179</td>
<td align="center">0.500</td>
</tr>
<tr>
<td align="center">RFR</td>
<td align="center">0.753</td>
<td align="center">0.0053</td>
<td align="center">0.231</td>
<td align="center">0.593</td>
</tr>
<tr>
<td align="center">ABR</td>
<td align="center">0.743</td>
<td align="center">0.0056</td>
<td align="center">0.226</td>
<td align="center">0.586</td>
</tr>
<tr>
<td align="center">ANN</td>
<td align="center">0.790</td>
<td align="center">0.0058</td>
<td align="center">0.219</td>
<td align="center">0.531</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec sec-type="conclusion" id="s6">
<title>6 Conclusion</title>
<p>The major achievement of this study is the development of multiple machine learning-based predictive models for settlement of shallow foundations due to rocking during earthquake loading. Though these ML models are trained and tested on a limited amount of available experimental data, they show promising predictive capabilities and they could possibly learn more and get even better in terms of their accuracy of predictions as more experimental data become available in the future. These ML models can be used with other analytical and numerical models and empirical relationships as complementary measures for estimating permanent settlement in practical applications of rocking foundations. The ML models presented here are (i) validated using experimental results, (ii) relatively easy to use (only six input features) and (iii) relatively fast and efficient compared to detailed finite element based modeling procedures. The major, specific conclusions drawn from this study include the following.<list list-type="simple">
<list-item>
<p>&#x2022; Given the values of six input features (three rocking system capacity parameters, one binary feature for soil type and two earthquake ground motion parameters) that are relatively easily obtainable for foundation design in majority of seismic zones, the ML models presented herein can be used to estimate the permanent settlement of rocking foundations.</p>
</list-item>
<list-item>
<p>&#x2022; The performances of all six nonlinear ML models developed in this study are relatively consistent in terms of prediction accuracy with their average MAPE varying between 0.64 and 0.86 in final k-fold cross validation tests.</p>
</list-item>
<list-item>
<p>&#x2022; The overall average MAE in predictions of all nonlinear ML models are smaller than 0.006, implying that the ML models developed in this study have the potential to predict permanent settlement of rocking foundations with reasonable accuracy in practical applications.</p>
</list-item>
<list-item>
<p>&#x2022; The ML models presented herein improve the accuracy of prediction by about 90% in comparison to a statistics based (non-ML) simple linear regression model (with MAPE &#x3d; 8.5). In addition, the stacking ensemble model, which has the best average accuracy in final k-fold cross validation tests, improves the accuracy of prediction by about 33% when compared to the baseline MLR model (MAPE of 0.64 <italic>versus</italic> 0.96).</p>
</list-item>
<list-item>
<p>&#x2022; Among the ANN model architectures considered, a shallow neural network (with only one hidden layer consisting of twenty neurons) is found to be the most suitable for the dataset analyzed without overfitting or underfitting the training data.</p>
</list-item>
<list-item>
<p>&#x2022; Based on the feature importance values obtained from RFR ensemble model, it is found that the six input features chosen for ML models capture the permanent settlement of rocking foundations satisfactorily, and that the settlement of rocking foundations is more sensitive to soil-foundation system properties than to earthquake ground motion properties.</p>
</list-item>
</list>
</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s7">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/<xref ref-type="sec" rid="s12">Supplementary Material</xref>, further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="s8">
<title>Author contributions</title>
<p>SG: Conceptualization, Data curation, Formal Analysis, Funding acquisition, Investigation, Methodology, Project administration, Resources, Software, Supervision, Validation, Visualization, Writing&#x2013;original draft, Writing&#x2013;review and editing.</p>
</sec>
<sec sec-type="funding-information" id="s9">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This research is funded by the US National Science Foundation (NSF) through award number CMMI-2138631.</p>
</sec>
<sec sec-type="COI-statement" id="s10">
<title>Conflict of interest</title>
<p>The author declares that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s11">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec id="s12">
<title>Supplementary material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fbuil.2024.1402619/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fbuil.2024.1402619/full&#x23;supplementary-material</ext-link>
</p>
<supplementary-material xlink:href="Table1.xlsx" id="SM1" mimetype="application/xlsx" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<sec id="s13">
<title>Abbreviations</title>
<p>ABR, Adaptive boosting regression model; a<sub>max</sub>, Peak ground acceleration of earthquake; ANN, Artificial neural network regression model; A/A<sub>c</sub>, Critical contact area ratio of rocking foundation; C<sub>r</sub>, Rocking coefficient of rocking system; GBR, Gradient boosting regression model; h/B, Slenderness ratio of rocking system; I<sub>a</sub>, Arias intensity of earthquake; KNN, k-nearest neighbors regression model; MAE, Mean absolute error; MAPE, Mean absolute percentage error; ML, Machine learning; MLR, Multivariate linear regression model; RFR, Random forest regression model; SVR, Support vector regression model.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alkayem</surname>
<given-names>N. F.</given-names>
</name>
<name>
<surname>Shen</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Mayya</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Asteris</surname>
<given-names>P. G.</given-names>
</name>
<name>
<surname>Fu</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Luzio</surname>
<given-names>G. D.</given-names>
</name>
<etal/>
</person-group> (<year>2024</year>). <article-title>Prediction of concrete and FRC properties at high temperature using machine and deep learning: a review of recent advances and future perspectives</article-title>. <source>J. Build. Eng.</source> <volume>83</volume>, <fpage>108369</fpage>. <pub-id pub-id-type="doi">10.1016/j.jobe.2023.108369</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Allotey</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Naggar</surname>
<given-names>M. H.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Analytical moment-rotation curves for rigid foundations based on a Winkler model</article-title>. <source>Soil Dyn. Earthq. Eng.</source> <volume>23</volume>, <fpage>367</fpage>&#x2013;<lpage>381</lpage>. <pub-id pub-id-type="doi">10.1016/S0267-7261(03)00034-4</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Amjad</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Ahmad</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Ahmad</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Wroblewski</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Kaminski</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Amjad</surname>
<given-names>U.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Prediction of pile bearing capacity using XGBoost algorithm: modeling and performance evaluation</article-title>. <source>Appl. Sci.</source> <volume>12</volume>, <fpage>2126</fpage>. <pub-id pub-id-type="doi">10.3390/app12042126</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anastasopoulos</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Gazetas</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Loli</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Apostolou</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gerolymos</surname>
<given-names>N.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Soil failure can be used for seismic protection of structures</article-title>. <source>Bull. Earthq. Eng.</source> <volume>8</volume>, <fpage>309</fpage>&#x2013;<lpage>326</lpage>. <pub-id pub-id-type="doi">10.1007/s10518-009-9145-2</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anastasopoulos</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Loli</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Georgarakos</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Drosos</surname>
<given-names>V.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Shaking table testing of rocking-isolated bridge pier on sand</article-title>. <source>J. Earthq. Eng.</source> <volume>17</volume>, <fpage>1</fpage>&#x2013;<lpage>32</lpage>. <pub-id pub-id-type="doi">10.1080/13632469.2012.705225</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Antonellis</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Gavras</surname>
<given-names>A. G.</given-names>
</name>
<name>
<surname>Panagiotou</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Guerrini</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Sander</surname>
<given-names>A.</given-names>
</name>
<etal/>
</person-group> (<year>2015</year>). <article-title>Shake table test of large-scale bridge columns supported on rocking shallow foundations</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>141</volume>, <fpage>0001284</fpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)GT.1943-5606.0001284</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Arabpanahan</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Mirghaderi</surname>
<given-names>S. R.</given-names>
</name>
<name>
<surname>Ghalandarzadeh</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Experimental characterization of SDOF-embedded foundation systems with asymmetric interface condition</article-title>. <source>Acta Geotech.</source> <volume>2023</volume>, <fpage>02135</fpage>&#x2013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1007/s11440-023-02135-5</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Armaghani</surname>
<given-names>D. J.</given-names>
</name>
<name>
<surname>Asteris</surname>
<given-names>P. G.</given-names>
</name>
<name>
<surname>Fatemi</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Hasanipanah</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Tarinejad</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Rashid</surname>
<given-names>A. S. A.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>On the use of neuro-swarm system to forecast the pile settlement</article-title>. <source>Appl. Sci.</source> <volume>10</volume>, <fpage>1904</fpage>. <pub-id pub-id-type="doi">10.3390/app10061904</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Asteris</surname>
<given-names>P. G.</given-names>
</name>
<name>
<surname>Koopialipoor</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Armaghani</surname>
<given-names>D. J.</given-names>
</name>
<name>
<surname>Kotsonis</surname>
<given-names>E. A.</given-names>
</name>
<name>
<surname>Lourenco</surname>
<given-names>P. B.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Prediction of cement-based mortars compressive strength using machine learning techniques</article-title>. <source>Neural comput. Appl.</source> <volume>33</volume>, <fpage>13089</fpage>&#x2013;<lpage>13121</lpage>. <pub-id pub-id-type="doi">10.1007/s00521-021-06004-8</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bapir</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Abrahamczyk</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Wichtmann</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Prada-Sarmiento</surname>
<given-names>L. F.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Soil-structure interaction: a state-of-the-art review of modeling techniques and studies on seismic response of building structures</article-title>. <source>Front. Built Environ.</source> <volume>9</volume>, <fpage>1120351</fpage>. <pub-id pub-id-type="doi">10.3389/fbuil.2023.1120351</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chatzigogos</surname>
<given-names>C. T.</given-names>
</name>
<name>
<surname>Figini</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Pecker</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Salencon</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>A macro element formulation for shallow foundations on cohesive and frictional soils</article-title>. <source>Int. J. Numer. Anal. methods Geomech.</source> <volume>35</volume>, <fpage>902</fpage>&#x2013;<lpage>931</lpage>. <pub-id pub-id-type="doi">10.1002/nag.934</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Deitel</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Deitel</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2020</year>) <source>Introduction to Python for computer science and data science</source>. <publisher-loc>New York, NY, USA</publisher-loc>: <publisher-name>Pearson Publishing</publisher-name>.</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deng</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Characterization of rocking shallow foundations using centrifuge model tests</article-title>. <source>Earthq. Eng. Struct. Dyn.</source> <volume>41</volume>, <fpage>1043</fpage>&#x2013;<lpage>1060</lpage>. <pub-id pub-id-type="doi">10.1002/eqe.1181</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Deng</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Kunnath</surname>
<given-names>S. K.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Centrifuge modeling of bridge systems designed for rocking foundations</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>138</volume>, <fpage>335</fpage>&#x2013;<lpage>344</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)GT.1943-5606.0000605</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Diaz</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Brotons</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Tomas</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Use of artificial neural networks to predict 3-D elastic settlement of foundations on soils with inclined bedrock</article-title>. <source>Soils. Found.</source> <volume>58</volume>, <fpage>1414</fpage>&#x2013;<lpage>1422</lpage>. <pub-id pub-id-type="doi">10.1016/j.sandf.2018.08.001</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Drosos</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Georgarakos</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Loli</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Anastasopoulos</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Zarzouras</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Gazetas</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Soil-foundation-structure interaction with mobilization of bearing capacity: experimental study on sand</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>138</volume>, <fpage>1369</fpage>&#x2013;<lpage>1386</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)GT.1943-5606.0000705</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ebid</surname>
<given-names>A. M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>35 years of (AI) in geotechnical engineering: state of the art</article-title>. <source>Geotech. Geol. Eng.</source> <volume>39</volume>, <fpage>637</fpage>&#x2013;<lpage>690</lpage>. <pub-id pub-id-type="doi">10.1007/s10706-020-01536-7</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Figini</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Paolucci</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Chatzigogos</surname>
<given-names>C. T.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>A macro-element model for non-linear soil-shallow foundation-structure interaction under seismic loads: theoretical development and experimental validation on large scale tests</article-title>. <source>Earthq. Eng. Struct. Dyn.</source> <volume>41</volume>, <fpage>475</fpage>&#x2013;<lpage>493</lpage>. <pub-id pub-id-type="doi">10.1002/eqe.1140</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Modeling of seismic energy dissipation of rocking foundations using nonparametric machine learning algorithms</article-title>. <source>Geotechnics</source> <volume>1</volume>, <fpage>534</fpage>&#x2013;<lpage>557</lpage>. <pub-id pub-id-type="doi">10.3390/geotechnics1020024</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Data-driven modeling of peak rotation and tipping-over stability of rocking shallow foundations using machine learning algorithms</article-title>. <source>Geotechnics</source> <volume>2</volume>, <fpage>781</fpage>&#x2013;<lpage>801</lpage>. <pub-id pub-id-type="doi">10.3390/geotechnics2030038</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Prediction of acceleration amplification ratio of rocking foundations using machine learning and deep learning models</article-title>. <source>Appl. Sci.</source> <volume>13</volume>, <fpage>12791</fpage>. <pub-id pub-id-type="doi">10.3390/app132312791</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Godagama</surname>
<given-names>B.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Seismic performance of bridge-deck-pier-type-structures with yielding columns supported by rocking foundations</article-title>. <source>J. Earthq. Eng.</source> <volume>26</volume>, <fpage>640</fpage>&#x2013;<lpage>673</lpage>. <pub-id pub-id-type="doi">10.1080/13632469.2019.1692737</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Capacity, settlement, and energy dissipation of shallow footings subjected to rocking</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>134</volume>, <fpage>1129</fpage>&#x2013;<lpage>1141</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)1090-0241(2008)134:8(1129)</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
</person-group> (<year>2009</year>). <article-title>Contact interface model for shallow foundations subjected to combined cyclic loading</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>135</volume>, <fpage>407</fpage>&#x2013;<lpage>419</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)1090-0241(2009)135:3(407)</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Phalen</surname>
<given-names>J. D.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Hutchinson</surname>
<given-names>T. C.</given-names>
</name>
<name>
<surname>Martin</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2005</year>). <article-title>Centrifuge modeling of load-deformation behavior of rocking shallow foundations</article-title>. <source>Soil Dyn. Earthq. Eng.</source> <volume>25</volume>, <fpage>773</fpage>&#x2013;<lpage>783</lpage>. <pub-id pub-id-type="doi">10.1016/j.soildyn.2004.11.019</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Raychowdhury</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Hutchinson</surname>
<given-names>T. C.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Stewart</surname>
<given-names>J. P.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Application and validation of practical tools for nonlinear soil-foundation interaction analysis</article-title>. <source>Earthq. Spectra</source> <volume>26</volume>, <fpage>111</fpage>&#x2013;<lpage>129</lpage>. <pub-id pub-id-type="doi">10.1193/1.3263242</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Saravanathiiban</surname>
<given-names>D. S.</given-names>
</name>
</person-group> (<year>2011</year>). <article-title>Modeling of energy dissipation in structural devices and foundation soil during seismic loading</article-title>. <source>Soil Dyn. Earthq. Eng.</source> <volume>31</volume>, <fpage>1106</fpage>&#x2013;<lpage>1122</lpage>. <pub-id pub-id-type="doi">10.1016/j.soildyn.2011.02.006</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Soundararajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Akchurin</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Effects of rocking coefficient and critical contact area ratio on the performance of rocking foundations from centrifuge and shake table experimental results</article-title>. <source>Soil Dyn. Earthq. Eng.</source> <volume>141</volume>, <fpage>106502</fpage>. <pub-id pub-id-type="doi">10.1016/j.soildyn.2020.106502</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gavras</surname>
<given-names>A. G.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Hakhamaneshi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Tsatsis</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sharma</surname>
<given-names>K.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Database of rocking shallow foundation performance: dynamic shaking</article-title>. <source>Earthq. Spectra</source> <volume>36</volume>, <fpage>960</fpage>&#x2013;<lpage>982</lpage>. <pub-id pub-id-type="doi">10.1177/8755293019891727</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Gavras</surname>
<given-names>A. G.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Hakhamaneshi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Tsatsis</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sharma</surname>
<given-names>K.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>FoRDy: rocking shallow foundation performance in dynamic experiments</article-title>. <source>DesignSafe-CI</source> <volume>2023</volume>. <pub-id pub-id-type="doi">10.13019/3rqyd929</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Geron</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2019</year>) <source>Hands-on machine learning with Scikit-Learn, Keras, and TensorFlow: concepts, tools and techniques to build intelligent systems</source>. <publisher-loc>Sebastopol, CA, USA</publisher-loc>: <publisher-name>O&#x2019;Reilly Media Inc.</publisher-name>.</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Goh</surname>
<given-names>A. T. C.</given-names>
</name>
<name>
<surname>Goh</surname>
<given-names>S. H.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>Support vector machines: their use in geotechnical engineering as illustrated using seismic liquefaction data</article-title>. <source>Comput. Geotech.</source> <volume>34</volume>, <fpage>410</fpage>&#x2013;<lpage>421</lpage>. <pub-id pub-id-type="doi">10.1016/j.compgeo.2007.06.001</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Hakhamaneshi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Deng</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Hutchinson</surname>
<given-names>T. C.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2012</year>). &#x201c;<article-title>New findings from centrifuge modeling of rocking shallow foundations in clayey ground</article-title>,&#x201d; in <conf-name>Proc. Geo-Congress 2012</conf-name>, <conf-loc>Oakland, CA, USA</conf-loc>.</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hakhamaneshi</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Kutter</surname>
<given-names>B. L.</given-names>
</name>
<name>
<surname>Gavras</surname>
<given-names>A. G.</given-names>
</name>
<name>
<surname>Gajan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Tsatsis</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>W.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Database of rocking shallow foundation performance: slow-cyclic and monotonic loading</article-title>. <source>Earthq. Spectra</source> <volume>36</volume>, <fpage>1585</fpage>&#x2013;<lpage>1606</lpage>. <pub-id pub-id-type="doi">10.1177/8755293020906564</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hamidpour</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Shakib</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Paolucci</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Correia</surname>
<given-names>A. A.</given-names>
</name>
<name>
<surname>Soltani</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Empirical models for the nonlinear rocking response of shallow foundations</article-title>. <source>Bull. Earthq. Eng.</source> <volume>20</volume>, <fpage>8099</fpage>&#x2013;<lpage>8122</lpage>. <pub-id pub-id-type="doi">10.1007/s10518-022-01449-1</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Irani</surname>
<given-names>A. E.</given-names>
</name>
<name>
<surname>Bonab</surname>
<given-names>M. H.</given-names>
</name>
<name>
<surname>Sarand</surname>
<given-names>F. B.</given-names>
</name>
<name>
<surname>Katebi</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Overall improvement of seismic resilience by rocking foundation and trade-off implications</article-title>. <source>Int. J. Geosynth. Ground Eng.</source> <volume>9</volume>, <fpage>40</fpage>. <pub-id pub-id-type="doi">10.1007/s40891-023-00454-x</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jeremiah</surname>
<given-names>J. J.</given-names>
</name>
<name>
<surname>Abbey</surname>
<given-names>S. J.</given-names>
</name>
<name>
<surname>Booth</surname>
<given-names>C. A.</given-names>
</name>
<name>
<surname>Kashyap</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Results of application of artificial neural networks in predicting geo-mechanical properties of stabilized clays &#x2013; a review</article-title>. <source>Geotechnics</source> <volume>1</volume>, <fpage>144</fpage>&#x2013;<lpage>171</lpage>. <pub-id pub-id-type="doi">10.3390/geotechnics1010008</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Karpatne</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Atluri</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Faghmous</surname>
<given-names>J. H.</given-names>
</name>
<name>
<surname>Steinbach</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Banerjee</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Ganguly</surname>
<given-names>A.</given-names>
</name>
<etal/>
</person-group> (<year>2017</year>). <article-title>Theory-guided data science: a new paradigm for scientific discovery from data</article-title>. <source>IEEE Trans. Knowl. Data Eng.</source> <volume>29</volume>, <fpage>2318</fpage>&#x2013;<lpage>2331</lpage>. <pub-id pub-id-type="doi">10.1109/TKDE.2017.2720168</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ko</surname>
<given-names>K.-W.</given-names>
</name>
<name>
<surname>Ha</surname>
<given-names>J.-G.</given-names>
</name>
<name>
<surname>Park</surname>
<given-names>H.-J.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>D.-S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Centrifuge modeling of improved design for rocking foundation using short piles</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>145</volume>, <fpage>5606</fpage>&#x2013;<lpage>0002064</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)GT.1943-5606.0002064</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Kramer</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>1996</year>) <source>Geotechnical earthquake engineering</source>. <publisher-loc>Upper Saddle River, NJ, USA</publisher-loc>: <publisher-name>Prentice Hall Inc</publisher-name>.</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Loli</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Knappett</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Brown</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Anastasopoulos</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Gazetas</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Centrifuge modeling of rocking-isolated inelastic RC bridge piers</article-title>. <source>Earthq. Eng. Struct. Dyn.</source> <volume>43</volume>, <fpage>2341</fpage>&#x2013;<lpage>2359</lpage>. <pub-id pub-id-type="doi">10.1002/eqe.2451</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mozumder</surname>
<given-names>R. A.</given-names>
</name>
<name>
<surname>Laskar</surname>
<given-names>A. I.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Prediction of unconfined compressive strength of geopolymer stabilized clayey soil using Artificial Neural Network</article-title>. <source>Comput. Geotech.</source> <volume>69</volume>, <fpage>291</fpage>&#x2013;<lpage>300</lpage>. <pub-id pub-id-type="doi">10.1016/j.compgeo.2015.05.021</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Paolucci</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Shirato</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Yilmaz</surname>
<given-names>M. T.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Seismic behaviour of shallow foundations: shaking table experiments <italic>vs</italic> numerical modelling</article-title>. <source>Earthq. Eng. Struct. Dyn.</source> <volume>37</volume>, <fpage>577</fpage>&#x2013;<lpage>595</lpage>. <pub-id pub-id-type="doi">10.1002/eqe.773</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pelekis</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>McKenna</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Madabhushi</surname>
<given-names>G. S. P.</given-names>
</name>
<name>
<surname>DeJong</surname>
<given-names>M. J.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Finite element modeling of buildings with structural and foundation rocking on dry sand</article-title>. <source>Earthq. Eng. Struct. Dyn.</source> <volume>50</volume>, <fpage>3093</fpage>&#x2013;<lpage>3115</lpage>. <pub-id pub-id-type="doi">10.1002/eqe.3501</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pham</surname>
<given-names>B. T.</given-names>
</name>
<name>
<surname>Bui</surname>
<given-names>D. T.</given-names>
</name>
<name>
<surname>Prakash</surname>
<given-names>I.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Landslide susceptibility assessment using bagging ensemble based alternating decision trees, logistic regression and J48 decision trees methods: a comparative study</article-title>. <source>Geotech. Geol. Eng.</source> <volume>35</volume>, <fpage>2597</fpage>&#x2013;<lpage>2611</lpage>. <pub-id pub-id-type="doi">10.1007/s10706-017-0264-2</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sharma</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Deng</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Characterization of rocking shallow foundations on cohesive soil using field snap-back tests</article-title>. <source>J. Geotech. Geoenvironmental Eng.</source> <volume>145</volume>, <fpage>0002114</fpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)GT.1943-5606.0002114</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sharma</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Deng</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Field testing of rocking foundations in cohesive soil: cyclic performance and footing mechanical response</article-title>. <source>Can. Geotech. J.</source> <volume>57</volume>, <fpage>828</fpage>&#x2013;<lpage>839</lpage>. <pub-id pub-id-type="doi">10.1139/cgj-2018-0734</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Soong</surname>
<given-names>T. T.</given-names>
</name>
<name>
<surname>Spencer</surname>
<given-names>B. F.</given-names>
<suffix>Jr.</suffix>
</name>
</person-group> (<year>2002</year>). <article-title>Supplemental energy dissipation: state-of-the-art and state-of-the-practice</article-title>. <source>Eng. Struct.</source> <volume>24</volume>, <fpage>243</fpage>&#x2013;<lpage>259</lpage>. <pub-id pub-id-type="doi">10.1016/S0141-0296(01)00092-X</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Symans</surname>
<given-names>M. D.</given-names>
</name>
<name>
<surname>Charney</surname>
<given-names>F. A.</given-names>
</name>
<name>
<surname>Whittaker</surname>
<given-names>A. S.</given-names>
</name>
<name>
<surname>Constantinou</surname>
<given-names>M. C.</given-names>
</name>
<name>
<surname>Kircher</surname>
<given-names>C. A.</given-names>
</name>
<name>
<surname>Johnson</surname>
<given-names>M. W.</given-names>
</name>
<etal/>
</person-group> (<year>2008</year>). <article-title>Energy dissipation systems for seismic applications: current practice and recent developments</article-title>. <source>J. Struct. Eng.</source> <volume>134</volume>, <fpage>3</fpage>&#x2013;<lpage>21</lpage>. <pub-id pub-id-type="doi">10.1061/(ASCE)0733-9445(2008)134:1(3)</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tsatsis</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Anastasopoulos</surname>
<given-names>I.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Performance of rocking systems on shallow improved sand: shaking table testing</article-title>. <source>Front. Built Environ.</source> <volume>1</volume>, <fpage>00009</fpage>. <pub-id pub-id-type="doi">10.3389/fbuil.2015.00009</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Xiong</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Pei</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Qiu</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2023</year>). &#x201c;<article-title>A machine learning-based method with integrated physics knowledge for predicting bearing capacity of pile foundations</article-title>,&#x201d; in <conf-name>Proc. Geo-Congress 2023</conf-name>, <conf-loc>Los Angeles, CA, USA</conf-loc>.</citation>
</ref>
</ref-list>
</back>
</article>