<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Psychol.</journal-id>
<journal-title>Frontiers in Psychology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Psychol.</abbrev-journal-title>
<issn pub-type="epub">1664-1078</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpsyg.2024.1421707</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Psychology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>The eyes as the exclamation mark of the face: exploring the relationship between eye size, intensity of female facial expressions and attractiveness in a range of emotions</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Est&#x00E9; Jaloveckas</surname> <given-names>Alan&#x00ED;s</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2725424/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Granero</surname> <given-names>Roser</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/243798/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Faculty of Psychology, Universitat Aut&#x00F2;noma de Barcelona - UAB</institution>, <addr-line>Barcelona</addr-line>, <country>Spain</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of Psychobiology and Methodology, Universitat Aut&#x00F2;noma de Barcelona - UAB</institution>, <addr-line>Barcelona</addr-line>, <country>Spain</country></aff>
<author-notes>
<fn id="fn0001" fn-type="edited-by"><p>Edited by: Federica Marcolin, Polytechnic University of Turin, Italy</p></fn>
<fn id="fn0002" fn-type="edited-by"><p>Reviewed by: Elena Carlotta Olivetti, Polytechnic University of Turin, Italy</p>
<p>Luca Ulrich, Polytechnic University of Turin, Italy</p>
<p>Jefferson Ortega, University of California, Berkeley, United States</p></fn>
<corresp id="c001">&#x002A;Correspondence: Roser Granero, <email>roser.granero@uab.cat</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>08</day>
<month>08</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1421707</elocation-id>
<history>
<date date-type="received">
<day>22</day>
<month>04</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>24</day>
<month>07</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2024 Est&#x00E9; Jaloveckas and Granero.</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Est&#x00E9; Jaloveckas and Granero</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec id="sec1">
<title>Background-objective</title>
<p>The eyes play an important role in communicating emotions and shape the determination of other facial attributes. Here, we explored the relationship between eye size, perceived intensity and attractiveness of facial expressions.</p>
</sec>
<sec id="sec2">
<title>Methods</title>
<p>A sample of <italic>N</italic>&#x2009;=&#x2009;63 participants (men and women, aged 18&#x2013;35) rated attractiveness and emotional intensity for images displaying emotionally expressive women&#x2019;s faces with digitally manipulated eye size (15% smaller, unchanged, or 15% larger).</p>
</sec>
<sec id="sec3">
<title>Results</title>
<p>The analysis of perceived intensity showed an interaction parameter between eye size and gender. Female individuals reported differences when comparing unchanged and larger eyes; male participants showed differences across all eye size comparisons (smaller-unchanged, smaller-larger, unchanged-larger). Regarding perceived attractiveness, faces with smaller eyes registered lower mean scores than both unchanged and larger. The lowest intensity level was associated with neutral faces and the highest with fearful ones. Faces displaying happiness were perceived as the most attractive.</p>
</sec>
<sec id="sec4">
<title>Conclusion</title>
<p>Larger eyes seem to make emotions more intense and attractive. We suggest that this intensifying effect serves an evolutionary purpose, as it might encourage caretaking behavior.</p>
</sec>
</abstract>
<kwd-group>
<kwd>emotion intensity</kwd>
<kwd>emotion perception</kwd>
<kwd>facial expression</kwd>
<kwd>eye size</kwd>
<kwd>avatar generation</kwd>
</kwd-group>
<counts>
<fig-count count="3"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="52"/>
<page-count count="11"/>
<word-count count="8760"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Emotion Science</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec5">
<label>1</label>
<title>Introduction</title>
<p><xref ref-type="bibr" rid="ref16">Ekman and Friesen (1975)</xref> stated that when wanting to assess the truthfulness of someone&#x2019;s expression, the feature that should be primarily paid attention to is the eyes. <xref ref-type="bibr" rid="ref2">Baron-Cohen et al. (2001)</xref> suggested that a person&#x2019;s state of mind can be decoded by looking at the widening and narrowing of the eyes. <xref ref-type="bibr" rid="ref30">Lee and Anderson (2017)</xref> endorsed the idea that the eyes&#x2019; area reveals emotional states without the need to observe other parts of the face; and that no other facial feature reveals as much information about emotional processing as the eyes. Current studies have also highlighted the fact that facial expressions, including the perception of eye size, can serve as salient nonverbal signals to capture individuals&#x2019; attention (<xref ref-type="bibr" rid="ref10">Carlson and Aday, 2018</xref>; <xref ref-type="bibr" rid="ref1">Aday et al., 2023</xref>).</p>
<p>Conversely, <xref ref-type="bibr" rid="ref15">Eisenbarth and Alpers (2011)</xref> observed that, when initially looking at a face, the first gaze tends to be directed toward either the eyes or the mouth. <xref ref-type="bibr" rid="ref7">Blais et al. (2012)</xref> advocated that the most distinct facial gestures across expressions are found in the mouth area, and that this region provides the most relevant cues to recognize facial expressions. Salience is a key factor in faces with a smile, independently of the eyes (whether or not congruent) that accompany the mouth (<xref ref-type="bibr" rid="ref18">Fern&#x00E1;ndez-Mart&#x00ED;n et al., 2013</xref>; <xref ref-type="bibr" rid="ref8">Calvo et al., 2016</xref>). <xref ref-type="bibr" rid="ref9">Carbon (2020)</xref> proposed that not seeing the lower half of the face (the mouth and surrounding area) impairs emotional recognition to the point of misinterpreting happy, sad, or angry expressions as neutral.</p>
<p><xref ref-type="bibr" rid="ref15">Eisenbarth and Alpers (2011)</xref> addressed the question by subdividing it into specific emotions. According to these authors, sadness and anger are mostly noticed in the eyes. Attention is mainly directed toward the mouth in happy faces; and in fearful and neutral expressions, the eyes and mouth are equally important. Paradoxically, <xref ref-type="bibr" rid="ref3">Barrett (2018)</xref> indicated that a clear indicator of fear are widely open eyes. <xref ref-type="bibr" rid="ref16">Ekman and Friesen (1975)</xref> argued that, although fear is initially shown in the eyebrows, its intensity is expressed by the openness of the eyes. Notably, when contradictory stimuli are shown (i.e., happy mouth and sad eyes simultaneously, or vice versa), people choose to rely on the mouth (<xref ref-type="bibr" rid="ref14">Dunlap, 1927</xref>). Recent studies have compared emotions in faces with and without surgical masks. <xref ref-type="bibr" rid="ref29">Langbehn et al. (2022)</xref> showed that, when the mouth is hidden, the information about the emotion on the face shrinks. <xref ref-type="bibr" rid="ref48">Tsantani et al. (2022)</xref> supported the idea that the intensity of facial expressions is reduced in all basic emotions&#x2014;joy, fear, anger, sadness, disgust, and surprise (<xref ref-type="bibr" rid="ref17">Ekman et al., 1969</xref>; <xref ref-type="bibr" rid="ref26">Jack et al., 2012</xref>)&#x2014;except anger, when the mouth is hidden. <xref ref-type="bibr" rid="ref27">Kim et al. (2022)</xref> observed that, when compared with the eyes, the mouth was most salient for happiness, sadness, and anger; however, in the case of fear, the eyes achieved higher relevance.</p>
<p>Succinctly, empirical research suggests that both eyes and mouth are important for emotional decoding; that the information drawn from the face varies depending on the emotion; and that attention is paid to one or another part of the face depending on the emotion on display. Furthermore, it has been hypothesized that it is not the components of the face, but rather the areas (i.e., the center), that attract our attention. However, the more familiar the face, the higher the chance that attention will be drawn to the eyes (<xref ref-type="bibr" rid="ref42">Royer et al., 2016</xref>). Other components of the face also claim visual attention [e.g., eyebrows are fundamental in anger (<xref ref-type="bibr" rid="ref16">Ekman and Friesen, 1975</xref>)]. Therefore, it is not a simple eye vs. mouth question for all emotions.</p>
<p>Rather than analyzing which components or facial areas provide more information, some studies have focused on how those features work together in the face (<xref ref-type="bibr" rid="ref4">Bartlett and Searcy, 1993</xref>; <xref ref-type="bibr" rid="ref52">Xiao et al., 2014</xref>; <xref ref-type="bibr" rid="ref33">Matsushita et al., 2015a</xref>). It has been suggested that faces could be recognized in a holistic way (<xref ref-type="bibr" rid="ref35">Maurer et al., 2002</xref>) (as a unit rather than as a collection of components). However, emotions could be interpreted in different ways. For example, <xref ref-type="bibr" rid="ref45">Tanaka et al. (2012)</xref> suggested that emotion recognition should be holistic when information is contradictory (such as a smiley face with angry eyebrows), but divided into different elements when all components point in the same direction (i.e., a happy face). Contrarily, <xref ref-type="bibr" rid="ref37">Omigbodun and Cottrell (2013)</xref> argued that facial expression processing is essentially holistic, independent of the components. <xref ref-type="bibr" rid="ref13">Curby et al. (2012)</xref> proposed that differences in the processing type are associated with the emotion types, negative emotions being less globally processed. Regarding the rating of attractiveness, facial proportions have been suggested to play a fundamental role in the holistic assessment of beauty in women (<xref ref-type="bibr" rid="ref50">Ulrich et al., 2019</xref>).</p>
<p>Studies have addressed the relationship between eye size, perceived level of attractiveness and perceived intensity of emotions (<xref ref-type="bibr" rid="ref41">Reis et al., 1990</xref>; <xref ref-type="bibr" rid="ref38">Oosterhof and Todorov, 2009</xref>; <xref ref-type="bibr" rid="ref44">Schmidt et al., 2012</xref>; <xref ref-type="bibr" rid="ref21">Golle et al., 2013</xref>; <xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>). Regarding the three possible analyses (association between eye size and attractiveness, eye size and intensity, and attractiveness and intensity), research has mainly focused on the impact of eye size on attractiveness, based on the potential evolutionary advantage of larger eyes (increased eye size has been associated with improved visual perceptiveness, a key capacity for finding food and detecting predators in animals). <xref ref-type="bibr" rid="ref6">Berry and McArthur (1985)</xref> proposed that large eyes, high eyebrows, and narrow chins generate baby-like appearances. <xref ref-type="bibr" rid="ref20">Glocker et al. (2009)</xref> suggested that babies&#x2019; eye size is attractive due to evolution and that cuteness motivates caretaking behavior, which increases the child&#x2019;s chances of survival. Larger eyes have also been linked to attractiveness in adulthood (<xref ref-type="bibr" rid="ref6">Berry and McArthur, 1985</xref>). Although averageness has repeatedly been linked to attractiveness (<xref ref-type="bibr" rid="ref46">Thornhill and Gangestad, 1993</xref>; <xref ref-type="bibr" rid="ref23">Grammer and Thornhill, 1994</xref>; <xref ref-type="bibr" rid="ref43">Scheib et al., 1999</xref>), larger eyes might signify health, which is an influential factor when choosing a sexual mate (<xref ref-type="bibr" rid="ref47">Thornhill and Gangestad, 1999</xref>). 
When other parts of the face (e.g., nose, lips) are smaller&#x2014;making the eyes more dominant&#x2014;attractiveness rates increase (<xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>). On the flip side, effects that make the eyes more attractive have been shown to make them seem larger (<xref ref-type="bibr" rid="ref34">Matsushita et al., 2015b</xref>). These studies suggest that eye size also has an influence on attraction in adulthood. Noticeably, the relationship between attractiveness and eye size has been analyzed using essentially positive emotions, mostly happiness (<xref ref-type="bibr" rid="ref51">Wagner, 1990</xref>), and it has been observed that faces characterized by positive emotions are perceived as more attractive than faces with neutral or negative expressions (<xref ref-type="bibr" rid="ref41">Reis et al., 1990</xref>; <xref ref-type="bibr" rid="ref38">Oosterhof and Todorov, 2009</xref>; <xref ref-type="bibr" rid="ref44">Schmidt et al., 2012</xref>; <xref ref-type="bibr" rid="ref21">Golle et al., 2013</xref>; <xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>).</p>
<p>The link between attractiveness and intensity has only been found to be significant when positive emotions are considered. The study by <xref ref-type="bibr" rid="ref21">Golle et al. (2013)</xref> observed that happier faces were perceived as more attractive than faces reflecting other emotion types. Another study aiming to compare the intensity and attractiveness levels of faces with happy, sad, and neutral expressions observed that more intense happy faces were considered more attractive than less intense happy faces (<xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>). This work also observed that sad faces evaluated as more intense were not perceived as less attractive. This evidence suggests that positive and negative emotions do not have opposite effects on the perception of attractiveness. Other studies also obtained higher intensity levels associated with angry and happy faces, as compared with neutral faces (<xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>).</p>
<p>Studies have also explored the role of the participants&#x2019; gender when interpreting emotion intensity assessments of facial expressions. <xref ref-type="bibr" rid="ref24">Hama and Koeda (2023)</xref> suggested that female participants rated happy and sad expressions as more intense than male individuals. Happiness has also been identified as more intensely perceived among women rating female images, and less intensely when men are rating male images. <xref ref-type="bibr" rid="ref22">Gong et al. (2018)</xref> have also suggested that females perceive both positive and negative emotions as more intense than males. However, these results also seem to be associated with age and culture. The aforementioned studies were carried out in Japan and China. A study carried out with German participants (<xref ref-type="bibr" rid="ref22">Gong et al., 2018</xref>) suggested that women perceive negative emotions more intensely and positive emotions less intensely, when compared to men. Regarding attractiveness levels, studies suggest that this measure may be similar regardless of participants&#x2019; gender (<xref ref-type="bibr" rid="ref28">Kranz and Ishai, 2006</xref>; <xref ref-type="bibr" rid="ref32">Levy et al., 2008</xref>).</p>
<p>In summary, empirical research provides inconsistent results about the role of the eyes in the recognition of emotions. A number of studies support the theory that the eyes provide cues to the emotional state of people, thus claiming the main role in the recognition of emotions in the face (<xref ref-type="bibr" rid="ref16">Ekman and Friesen, 1975</xref>; <xref ref-type="bibr" rid="ref2">Baron-Cohen et al., 2001</xref>; <xref ref-type="bibr" rid="ref30">Lee and Anderson, 2017</xref>). Other findings suggest that the eyes are not always the main conveyors of emotion, but that this role is instead claimed by the mouth (<xref ref-type="bibr" rid="ref14">Dunlap, 1927</xref>; <xref ref-type="bibr" rid="ref7">Blais et al., 2012</xref>; <xref ref-type="bibr" rid="ref9">Carbon, 2020</xref>; <xref ref-type="bibr" rid="ref29">Langbehn et al., 2022</xref>). Either way, this approach maintains that eyes could be a central element (primary or secondary, in addition to the mouth) for decoding and assessing emotional states, and that the specific role of the eye could vary depending on the emotion type (<xref ref-type="bibr" rid="ref15">Eisenbarth and Alpers, 2011</xref>; <xref ref-type="bibr" rid="ref27">Kim et al., 2022</xref>; <xref ref-type="bibr" rid="ref48">Tsantani et al., 2022</xref>). An alternative theory proposes that emotions, like faces, are processed in a holistic way, rather than by separate components (<xref ref-type="bibr" rid="ref37">Omigbodun and Cottrell, 2013</xref>). However, results in this research area are contradictory (<xref ref-type="bibr" rid="ref13">Curby et al., 2012</xref>; <xref ref-type="bibr" rid="ref45">Tanaka et al., 2012</xref>), suggesting that although some features of the face could be relevant in understanding specific emotions, the face as a whole could also have effects on emotional decoding.</p>
<p>Previous studies have also established a relationship between perceived attractiveness and intensity of emotions, as well as between attractiveness and eye size. It must be stressed that the link between intensity and eye size remains mostly unexplored, and some results seem inconsistent and related to the emotion type. Overall, eye size ratings have been shown to increase alongside attractiveness, and apace with the intensity of happiness (<xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>). Fewer studies have addressed negative emotions. Regarding attractiveness, negative emotions are perceived as less attractive than happy and neutral ones (<xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>; <xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>). Regarding intensity, <xref ref-type="bibr" rid="ref15">Eisenbarth and Alpers (2011)</xref> argued that eyes achieve a more central role than the mouth for the assessment of angry and sad faces, which in turn might make bigger eyes distort the perception of intensity in those emotions. Moreover, fearful and neutral faces are understood equally from the eyes and the mouth, which reduces the role of the eyes in comparison to the other emotions. Paradoxically, fearful faces are characteristically recognized by widened eyes (<xref ref-type="bibr" rid="ref3">Barrett, 2018</xref>), which suggests the relevance of the eyes when it comes to this emotion. Gender and age might also have an effect on intensity (<xref ref-type="bibr" rid="ref22">Gong et al., 2018</xref>; <xref ref-type="bibr" rid="ref24">Hama and Koeda, 2023</xref>). Participants&#x2019; gender and sexual preference should also be considered when interpreting attractiveness ratings (<xref ref-type="bibr" rid="ref28">Kranz and Ishai, 2006</xref>; <xref ref-type="bibr" rid="ref32">Levy et al., 2008</xref>).</p>
<p>Additionally, when human representations are artificially altered to diverge from reality, it is essential to consider the degree of these changes. The uncanny valley is the phenomenon in which artificial human representations are perceived as uncomfortable as they become more similar to reality (<xref ref-type="bibr" rid="ref36">Mori et al., 2012</xref>). This phenomenon is commonly seen in video games, robotics, and human-like dolls (<xref ref-type="bibr" rid="ref11">Cheetham, 2011</xref>).</p>
<p>To summarize, there is debate concerning to what extent emotions are perceived in the eyes. Empirical results suggest that eye size could be a key indicator of some emotions, and that this characteristic might modify the intensity of the emotional cues provided by the face as a whole. Concretely, larger eyes would make happy faces look happier, angry faces angrier, and so on. According to evolution-based theories, neutral faces with bigger eyes may also be rated as more intense than those with smaller eyes (<xref ref-type="bibr" rid="ref6">Berry and McArthur, 1985</xref>; <xref ref-type="bibr" rid="ref20">Glocker et al., 2009</xref>). The hypothesis that eye size intensifies facial emotional clues also supports the argument that emotions are processed holistically, and that emotional expressions are deciphered by understanding the face as a compendium of pieces that must be looked at together, as the emotions in which the eyes do not have as much of an effect (e.g., happiness) would still be affected by the face in its entirety. However, the results are uncertain and new empirical studies are needed to consolidate (or refute) this line of research.</p>
<p>The main goal of the present study was to examine the relationship between eye size and facial emotional expression with the perceived intensity and attractiveness ratings. In addition, the study explored the potential moderator role of gender and age within the aforementioned relationships. Based on the existing empirical studies, we hypothesized that larger eyes should be associated with higher scores in attractiveness and intensity, particularly for fear and happiness. Due to the lack of previous empirical evidence, no hypothesis was formulated regarding the potential interaction of gender and age in the relationships.</p>
</sec>
<sec sec-type="materials|methods" id="sec6">
<label>2</label>
<title>Materials and methods</title>
<sec id="sec7">
<label>2.1</label>
<title>Sample</title>
<p>The experiment was shared as a URL. Participants were recruited with a short text message introducing them to the study via social media (e.g., Instagram) and online chats (e.g., WhatsApp). The URL was further spread by means of snowball sampling. Participants volunteered their time. There were no economic or otherwise relevant incentives. Inclusion criteria for all participants were: to have normal or corrected-to-normal vision and to be between 18 and 35&#x2009;years of age. No participants were removed due to gender, age, ethnicity, or occupation.</p>
<p>An initial sample of <italic>N</italic>&#x2009;=&#x2009;82 individuals started the experiment. Nineteen subjects did not complete the first section with the demographics or abandoned during the training task, which led to a final sample consisting of <italic>N</italic>&#x2009;=&#x2009;63 participants (43 were classified as <italic>Female</italic>, 17 as <italic>Male</italic> and 3 as <italic>Other</italic>) who completed the experimental task. The task was not only repetitive but also long to complete. We hypothesize it required too much time and some participants lost motivation to continue. Those two aspects might have led the 19 subjects to abandon the experiment before completing it.</p>
<p><xref ref-type="fig" rid="fig1">Figure 1</xref> shows the flow-chart with the sampling process (participants retained during the experimental task and dropouts). No statistically significant differences between completers and dropouts were observed for gender (<italic>p</italic>&#x2009;=&#x2009;0.051), age (<italic>p</italic>&#x2009;=&#x2009;0.082), employment status (<italic>p</italic>&#x2009;=&#x2009;0.264), and ethnicity (<italic>p</italic>&#x2009;=&#x2009;0.440).</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption><p>Flow-chart with the sampling method.</p></caption>
<graphic xlink:href="fpsyg-15-1421707-g001.tif"/>
</fig>
</sec>
<sec id="sec8">
<label>2.2</label>
<title>Materials</title>
<sec id="sec9">
<label>2.2.1</label>
<title>Stimuli</title>
<p>To conduct the study, some of the previously defined basic emotions were used: anger, joy, fear, and sadness. Not all original Ekman&#x2019;s facial expressions&#x2014;joy, fear, anger, sadness, disgust, and surprise (<xref ref-type="bibr" rid="ref17">Ekman et al., 1969</xref>; <xref ref-type="bibr" rid="ref26">Jack et al., 2012</xref>)&#x2014;were selected for the experimental task, as doing so would have required more time to complete it, and a longer task might have increased the number of dropouts. A neutral expression was used to set a baseline for the intensity ratings. The emotions chosen for this study were selected based on those that have received the most attention in previous scientific research. Some of the studies addressing the topics of intensity, attractiveness or eye size in relation to decoding facial expressions have used either the exact same emotions used in this study (<xref ref-type="bibr" rid="ref15">Eisenbarth and Alpers, 2011</xref>) or a subset of them (<xref ref-type="bibr" rid="ref38">Oosterhof and Todorov, 2009</xref>; <xref ref-type="bibr" rid="ref10">Carlson and Aday, 2018</xref>; <xref ref-type="bibr" rid="ref1">Aday et al., 2023</xref>).</p>
<p>Thirty-five pictures&#x2014;seven female identities, each displaying five facial expressions: neutral, happiness, fear, sadness and anger&#x2014;were selected from <italic>The racially diverse affective expression (RADIATE) face stimulus set</italic> (<xref ref-type="bibr" rid="ref12">Conley et al., 2018</xref>). The code-files used for the experimental trials were faces AF01, AF12, BF03, BF09, WF06, and WF10; and the code-file used for the training trial was face WF15. All the images corresponded to female identities spanning a mixture of ethnicities (Asian, Black, White). There were two female faces of each ethnic group. Male identities were not included with the purpose of reducing the time needed to complete the experiment, as the inclusion of a new experimental condition (male faces) would have doubled the time needed to complete the experiment, thus increasing the chances that a larger number of participants abandoned the study, as they received no monetary or other compensation for their participation. Furthermore, the consideration of this new experimental condition would have required the inclusion of a new experimental factor, and the planned sample size might not have allowed adequate statistical power. Female faces were chosen over male ones because it is more common to use male identities, rather than female, in psychological experiments; doing it differently adds representativeness to the field. Faces with hair in front of the eyes in any expression were discarded, as this would have complicated the process of digitally altering the eye size within the stimuli.</p>
<p>Three versions of each original picture were used in the experiment. The original, unmodified image was used for the medium-sized eyes. Previous research has worked with values of 5&#x2013;10% change (<xref ref-type="bibr" rid="ref20">Glocker et al., 2009</xref>) in similar experiments. With the purpose of assessing a different degree of change than the ones already studied and published by the scientific community, the eyes were changed 15% from their original size. Each picture was modified twice. The eyes were reduced 15% for the smaller eye images and increased 15% for the larger ones (including eyelids and eyelashes, excluding eyebrows). The eyes area was enlarged or reduced as necessary, and then blended into the picture so that the change was not obvious. To edit the images <italic>Adobe Photoshop</italic> version 16.0 was used. See <xref ref-type="fig" rid="fig2">Figure 2</xref> for examples of images used during the experiment with different eye sizes, as well as an example of how the eyes area was modified. The original unchanged images are part of the open-access RADIATE face stimulus set published by <xref ref-type="bibr" rid="ref12">Conley et al. (2018)</xref>.</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption><p>Eye sizes on a scared face used during the experimental trials.</p></caption>
<graphic xlink:href="fpsyg-15-1421707-g002.tif"/>
</fig>
<p>Fifteen images of one female identity (five expressions, three eye sizes) were readied to be used only during an initial training task. Ninety images of the remaining six female identities (five expressions, three eye sizes, each) were readied to be used during the experimental task. The identities used for the practice and experimental trials remained constant for all participants.</p>
<p>The experiment was built and conducted using Sona Systems written in PHP and JavaScript, using MySQL for the database. Demographics and experimental data were collected online with participants able to undertake the task on any computer/laptop but not tablet or smart device.</p>
</sec>
</sec>
<sec id="sec10">
<label>2.3</label>
<title>Data collection</title>
<p>Five fixed lists of stimuli were created for the experimental trials. Participants were randomly assigned to one of the lists when they started the experiment. The order of the images was randomized beforehand for the purpose of facilitating the technicalities of setting up the experiment. Images with the same emotion and human face were not shown twice in a row during the trials so that the difference in eye sizes was not apparent. MATLAB software and in-house code were used to create the random list of images, with the condition that images of the same identity and expression with different eye size did not follow each other. The experimental trials were broken down into three equally long blocks with self-paced short breaks in between to avoid tiredness. During the breaks an encouraging message was shown on the screen accompanied by a picture of an animal. One main reason for dropping out of experimental studies is a lack of motivation, particularly in studies with no financial compensation for participation. The message was meant to keep participants engaged in the experimental task with the purpose of reducing the number of dropouts. The message was also meant to let participants know it was the moment to take a pause should they need it. Additionally, the message indicated how far along in the experiment they had arrived (1/3 the first message and 2/3 the second message). The purpose of the picture was to make the experiment less mentally tiring.</p>
<p>Participants were naive to the experimental tasks and purpose. The entire experiment was carried out in approximately 20&#x2009;min. Firstly, participants were shown a welcoming message that introduced them to the study. Next, they were asked to complete the demographic section, and a training task to ensure that they adequately understood how to perform the experimental task. Participants were instructed to complete both the practice and experimental tasks in a quiet space with stable internet connection, and to maximize their browser.</p>
<p>There were a total of 95 trials (including the five training trials). Each training trial displayed one of the five emotions (neutral, happiness, fear, sadness, anger), with one of the eye sizes. On all trials, participants evaluated each face twice, rating first the intensity of the emotion shown, and then the attractiveness of each face, on a scale from 1 to 7. During the trials the images were presented in color, placed in the center of the screen and covered approximately 40% of the screen space. A written question shown below each image informed the participant of the task: (1) <italic>How intense do you find her expression?</italic> (2) <italic>How attractive do you find her?</italic> The image remained on screen until both evaluations had been made. The order of questions was fixed. Participants responded based on a seven-point Likert scale from 1 (lowest intensity level: <italic>Not very intense</italic>/<italic>Not very attractive</italic>) to 7 (highest intensity level: <italic>Very intense</italic>/<italic>Very attractive</italic>) located below the question. Participants were only informed of the interpretation of the extremes of the scale (values 1 and 7), similar to an analog scale. Each image was preceded by a fixation cross for 500 milliseconds, and subsequent trials began immediately after the second response.</p>
<p>After the training task, participants moved on to the experimental task. There were six experimental trials per condition. Only female faces were selected for the experiment. The experimental task consisted of 90 trials split into three equal blocks. Breaks were self-paced and featured a message (e.g., &#x201C;<italic>End of part 1 (of 3), you are doing great!</italic>&#x201D;) and an accompanying encouraging picture. The procedure followed the same structure as in the training trials. Participants were shown one image at a time. There was no time limit to rate the images. Once finished, participants were debriefed and thanked for participating (<xref rid="SM1" ref-type="supplementary-material">Supplementary Table S1</xref>).</p>
<p>Participants rated the intensity and attractiveness of 90 images, that corresponded to 6 different human faces presented in 15 experimental conditions (three eye sizes &#x00D7; five emotions). The statistical analysis was performed for the mean value calculated for the 6 human faces in each experimental condition. That is, each participant provided 15 measures of the attractiveness level and 15 measures of the intensity level (for the combination of the three eye sizes with the five emotion types).</p>
</sec>
<sec id="sec11">
<label>2.4</label>
<title>Statistical analysis</title>
<p>The statistical analysis was carried out with SPSS24 for Windows. Repeated measures analysis of variance (ANOVA) was used to assess the effect of eye size (within-subjects factor with three levels: small, unchanged, large) and emotion type (within-subjects factor with five levels: neutral, happiness, fear, sadness, anger). To assess the potential moderator effect of the participants&#x2019; gender (female, male) and age (young, middle), two between-subjects factors were also added into the model and tested, resulting in a 3&#x2009;&#x00D7;&#x2009;5&#x2009;&#x00D7;&#x2009;2&#x2009;&#x00D7;&#x2009;2 ANOVA (eye &#x00D7; emotion &#x00D7; gender &#x00D7; age). The dependent variables of the analyses were the perceived intensity and attractiveness of the visualized emotions.</p>
<p>Mauchly&#x2019;s test was used to assess the sphericity condition for the ANOVA procedures, and if the assumption was not met (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05) Greenhouse&#x2013;Geisser corrected tests were selected. The effect size of the parameters obtained in the ANOVA was measured through partial eta-squared coefficients (&#x03B7;<italic><sub>p</sub></italic><sup>2</sup>), considering values of 0.06 as poor, 0.10 as moderate&#x2013;mild and 0.25 as high-large (<xref ref-type="bibr" rid="ref31">Levine and Hullet, 2002</xref>). Because of the relatively small sample size (that could have had an impact on the statistical power), relevant effects were considered for both any statistical significance (<italic>p</italic>&#x2009;&#x2264;&#x2009;0.05) or effect sizes at least in the moderate&#x2013;mild range (&#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;&#x2265;&#x2009;0.10).</p>
</sec>
<sec id="sec12">
<label>2.5</label>
<title>Ethical considerations</title>
<p>The experiment was approved by the ethics board of the Department of Psychological Sciences, Birkbeck College, University of London (approval number: 2122068, date of approval: 09/05/2022). All the participants provided informed consent by ticking a box within the website before starting the experiment. Participants did not receive economic compensation for their engagement in the research.</p>
</sec>
</sec>
<sec sec-type="results" id="sec13">
<label>3</label>
<title>Results</title>
<sec id="sec14">
<label>3.1</label>
<title>Descriptive for the sample</title>
<p><xref ref-type="table" rid="tab1">Table 1</xref> shows the descriptive for the sample. Most participants were female (68.3%), white-European (81.0%), and employed (44.4%). Mean age was 25.2&#x2009;years (SD&#x2009;=&#x2009;5.0). No differences between female and male participants were obtained for ethnicity (&#x03C7;<sup>2</sup>&#x2009;=&#x2009;0.42, <italic>p</italic>&#x2009;=&#x2009;1.00), employment status (&#x03C7;<sup>2</sup>&#x2009;=&#x2009;2.26, <italic>p</italic>&#x2009;=&#x2009;0.447) and chronological mean age (<italic>F</italic>&#x2009;=&#x2009;0.99, <italic>p</italic>&#x2009;=&#x2009;0.377).</p>
<table-wrap position="float" id="tab1">
<label>Table 1</label>
<caption><p>Descriptive for the sample.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th/>
<th align="center" valign="top"><italic>n</italic></th>
<th align="center" valign="top">%</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top" colspan="3">Gender</td>
</tr>
<tr>
<td align="left" valign="top">Female</td>
<td align="center" valign="top">43</td>
<td align="center" valign="top">68.25%</td>
</tr>
<tr>
<td align="left" valign="top">Male</td>
<td align="center" valign="top">17</td>
<td align="center" valign="top">26.98%</td>
</tr>
<tr>
<td align="left" valign="top">Other</td>
<td align="center" valign="top">2</td>
<td align="center" valign="top">3.17%</td>
</tr>
<tr>
<td align="left" valign="top">Missing</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">1.59%</td>
</tr>
<tr>
<td align="left" valign="top" colspan="3">Ethnicity</td>
</tr>
<tr>
<td align="left" valign="top">White &#x2013; European</td>
<td align="center" valign="top">51</td>
<td align="center" valign="top">80.95%</td>
</tr>
<tr>
<td align="left" valign="top">Other</td>
<td align="center" valign="top">10</td>
<td align="center" valign="top">15.87%</td>
</tr>
<tr>
<td align="left" valign="top">Missing</td>
<td align="center" valign="top">2</td>
<td align="center" valign="top">3.17%</td>
</tr>
<tr>
<td align="left" valign="top" colspan="3">Employment</td>
</tr>
<tr>
<td align="left" valign="top">Employed</td>
<td align="center" valign="top">28</td>
<td align="center" valign="top">44.44%</td>
</tr>
<tr>
<td align="left" valign="top">Student</td>
<td align="center" valign="top">27</td>
<td align="center" valign="top">42.86%</td>
</tr>
<tr>
<td align="left" valign="top">Missing</td>
<td align="center" valign="top">8</td>
<td align="center" valign="top">12.70%</td>
</tr>
<tr>
<td/>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
</tr>
<tr>
<td align="left" valign="top">Age (years)</td>
<td align="center" valign="top">25.19</td>
<td align="center" valign="top">4.97</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>SD, standard deviation.</p>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="tab2">Table 2</xref> contains the descriptive for both the intensity and the attractiveness levels in the study, among the total sample and stratified by participants&#x2019; gender and age (classification into the two age groups was based on the median [percentile 50] estimated in the sample). <xref rid="SM1" ref-type="supplementary-material">Supplementary Figure S1</xref> shows line-plots with the mean scores for both intensity and attractiveness, considering the within-subjects factors (eye size and emotion type) and the between-subjects factors (gender and age).</p>
<table-wrap position="float" id="tab2">
<label>Table 2</label>
<caption><p>Descriptive for intensity and attractiveness levels.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th/>
<th/>
<th/>
<th align="center" valign="top" colspan="10">Emotion</th>
</tr>
<tr>
<th/>
<th/>
<th/>
<th align="center" valign="top" colspan="2">Neutral</th>
<th align="center" valign="top" colspan="2">Happiness</th>
<th align="center" valign="top" colspan="2">Fear</th>
<th align="center" valign="top" colspan="2">Sadness</th>
<th align="center" valign="top" colspan="2">Anger</th>
</tr>
<tr>
<th align="left" valign="top">Total sample</th>
<th align="center" valign="top" colspan="2"><italic>N</italic> =&#x2009;63</th>
<th align="center" valign="top">Mean</th>
<th align="center" valign="top">SD</th>
<th align="center" valign="top">Mean</th>
<th align="center" valign="top">SD</th>
<th align="center" valign="top">Mean</th>
<th align="center" valign="top">SD</th>
<th align="center" valign="top">Mean</th>
<th align="center" valign="top">SD</th>
<th align="center" valign="top">Mean</th>
<th align="center" valign="top">SD</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top" rowspan="3">Intensity</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.11</td>
<td align="center" valign="top">1.05</td>
<td align="center" valign="top">4.67</td>
<td align="center" valign="top">1.09</td>
<td align="center" valign="top">5.78</td>
<td align="center" valign="top">0.72</td>
<td align="center" valign="top">4.18</td>
<td align="center" valign="top">1.01</td>
<td align="center" valign="top">4.84</td>
<td align="center" valign="top">0.79</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.16</td>
<td align="center" valign="top">1.05</td>
<td align="center" valign="top">4.79</td>
<td align="center" valign="top">1.03</td>
<td align="center" valign="top">5.77</td>
<td align="center" valign="top">0.67</td>
<td align="center" valign="top">4.18</td>
<td align="center" valign="top">0.88</td>
<td align="center" valign="top">4.83</td>
<td align="center" valign="top">0.86</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.37</td>
<td align="center" valign="top">1.07</td>
<td align="center" valign="top">4.85</td>
<td align="center" valign="top">1.13</td>
<td align="center" valign="top">5.98</td>
<td align="center" valign="top">0.74</td>
<td align="center" valign="top">4.33</td>
<td align="center" valign="top">0.91</td>
<td align="center" valign="top">5.03</td>
<td align="center" valign="top">0.89</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Attractiveness</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.29</td>
<td align="center" valign="top">1.01</td>
<td align="center" valign="top">3.25</td>
<td align="center" valign="top">1.35</td>
<td align="center" valign="top">2.36</td>
<td align="center" valign="top">0.98</td>
<td align="center" valign="top">2.36</td>
<td align="center" valign="top">0.92</td>
<td align="center" valign="top">2.51</td>
<td align="center" valign="top">0.94</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.92</td>
<td align="center" valign="top">1.07</td>
<td align="center" valign="top">3.90</td>
<td align="center" valign="top">1.40</td>
<td align="center" valign="top">2.74</td>
<td align="center" valign="top">1.00</td>
<td align="center" valign="top">2.78</td>
<td align="center" valign="top">0.96</td>
<td align="center" valign="top">2.79</td>
<td align="center" valign="top">1.02</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.81</td>
<td align="center" valign="top">1.01</td>
<td align="center" valign="top">3.79</td>
<td align="center" valign="top">1.23</td>
<td align="center" valign="top">2.63</td>
<td align="center" valign="top">0.95</td>
<td align="center" valign="top">2.72</td>
<td align="center" valign="top">1.00</td>
<td align="center" valign="top">2.83</td>
<td align="center" valign="top">1.00</td>
</tr>
<tr>
<td align="left" valign="top">Female</td>
<td align="center" valign="top" colspan="2"><italic>N</italic> =&#x2009;43</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Intensity</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.05</td>
<td align="center" valign="top">1.08</td>
<td align="center" valign="top">4.67</td>
<td align="center" valign="top">1.21</td>
<td align="center" valign="top">5.80</td>
<td align="center" valign="top">0.75</td>
<td align="center" valign="top">4.18</td>
<td align="center" valign="top">1.12</td>
<td align="center" valign="top">4.83</td>
<td align="center" valign="top">0.91</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.03</td>
<td align="center" valign="top">1.02</td>
<td align="center" valign="top">4.77</td>
<td align="center" valign="top">1.05</td>
<td align="center" valign="top">5.74</td>
<td align="center" valign="top">0.70</td>
<td align="center" valign="top">4.16</td>
<td align="center" valign="top">0.98</td>
<td align="center" valign="top">4.79</td>
<td align="center" valign="top">0.96</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.17</td>
<td align="center" valign="top">0.97</td>
<td align="center" valign="top">4.74</td>
<td align="center" valign="top">1.17</td>
<td align="center" valign="top">5.96</td>
<td align="center" valign="top">0.74</td>
<td align="center" valign="top">4.22</td>
<td align="center" valign="top">0.94</td>
<td align="center" valign="top">5.00</td>
<td align="center" valign="top">0.95</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Attractiveness</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.30</td>
<td align="center" valign="top">1.11</td>
<td align="center" valign="top">3.28</td>
<td align="center" valign="top">1.40</td>
<td align="center" valign="top">2.43</td>
<td align="center" valign="top">1.08</td>
<td align="center" valign="top">2.33</td>
<td align="center" valign="top">0.98</td>
<td align="center" valign="top">2.51</td>
<td align="center" valign="top">1.04</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.90</td>
<td align="center" valign="top">1.15</td>
<td align="center" valign="top">3.93</td>
<td align="center" valign="top">1.41</td>
<td align="center" valign="top">2.78</td>
<td align="center" valign="top">1.08</td>
<td align="center" valign="top">2.82</td>
<td align="center" valign="top">1.07</td>
<td align="center" valign="top">2.80</td>
<td align="center" valign="top">1.16</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.81</td>
<td align="center" valign="top">1.03</td>
<td align="center" valign="top">3.74</td>
<td align="center" valign="top">1.24</td>
<td align="center" valign="top">2.64</td>
<td align="center" valign="top">1.01</td>
<td align="center" valign="top">2.70</td>
<td align="center" valign="top">1.10</td>
<td align="center" valign="top">2.82</td>
<td align="center" valign="top">1.10</td>
</tr>
<tr>
<td align="left" valign="top">Male</td>
<td align="center" valign="top" colspan="2"><italic>N</italic> =&#x2009;17</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Intensity</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.27</td>
<td align="center" valign="top">1.03</td>
<td align="center" valign="top">4.75</td>
<td align="center" valign="top">0.60</td>
<td align="center" valign="top">5.70</td>
<td align="center" valign="top">0.69</td>
<td align="center" valign="top">4.18</td>
<td align="center" valign="top">0.77</td>
<td align="center" valign="top">4.82</td>
<td align="center" valign="top">0.42</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.46</td>
<td align="center" valign="top">1.13</td>
<td align="center" valign="top">4.92</td>
<td align="center" valign="top">0.80</td>
<td align="center" valign="top">5.86</td>
<td align="center" valign="top">0.62</td>
<td align="center" valign="top">4.29</td>
<td align="center" valign="top">0.58</td>
<td align="center" valign="top">4.91</td>
<td align="center" valign="top">0.59</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.89</td>
<td align="center" valign="top">1.20</td>
<td align="center" valign="top">5.15</td>
<td align="center" valign="top">0.86</td>
<td align="center" valign="top">6.11</td>
<td align="center" valign="top">0.75</td>
<td align="center" valign="top">4.67</td>
<td align="center" valign="top">0.82</td>
<td align="center" valign="top">5.07</td>
<td align="center" valign="top">0.72</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Attractiveness</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.24</td>
<td align="center" valign="top">0.78</td>
<td align="center" valign="top">3.03</td>
<td align="center" valign="top">1.25</td>
<td align="center" valign="top">2.10</td>
<td align="center" valign="top">0.68</td>
<td align="center" valign="top">2.31</td>
<td align="center" valign="top">0.69</td>
<td align="center" valign="top">2.47</td>
<td align="center" valign="top">0.73</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.93</td>
<td align="center" valign="top">0.91</td>
<td align="center" valign="top">3.72</td>
<td align="center" valign="top">1.45</td>
<td align="center" valign="top">2.59</td>
<td align="center" valign="top">0.86</td>
<td align="center" valign="top">2.65</td>
<td align="center" valign="top">0.67</td>
<td align="center" valign="top">2.76</td>
<td align="center" valign="top">0.66</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.83</td>
<td align="center" valign="top">1.04</td>
<td align="center" valign="top">3.78</td>
<td align="center" valign="top">1.21</td>
<td align="center" valign="top">2.58</td>
<td align="center" valign="top">0.80</td>
<td align="center" valign="top">2.72</td>
<td align="center" valign="top">0.78</td>
<td align="center" valign="top">2.76</td>
<td align="center" valign="top">0.70</td>
</tr>
<tr>
<td align="left" valign="top">Young age<xref ref-type="table-fn" rid="tfn1"><sup>a</sup></xref></td>
<td align="center" valign="top" colspan="2"><italic>N</italic> =&#x2009;31</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Intensity</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.15</td>
<td align="center" valign="top">0.94</td>
<td align="center" valign="top">4.62</td>
<td align="center" valign="top">1.16</td>
<td align="center" valign="top">5.87</td>
<td align="center" valign="top">0.69</td>
<td align="center" valign="top">4.08</td>
<td align="center" valign="top">1.08</td>
<td align="center" valign="top">4.78</td>
<td align="center" valign="top">0.74</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.14</td>
<td align="center" valign="top">0.93</td>
<td align="center" valign="top">4.75</td>
<td align="center" valign="top">1.17</td>
<td align="center" valign="top">5.77</td>
<td align="center" valign="top">0.64</td>
<td align="center" valign="top">4.08</td>
<td align="center" valign="top">0.96</td>
<td align="center" valign="top">4.75</td>
<td align="center" valign="top">0.82</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.25</td>
<td align="center" valign="top">0.93</td>
<td align="center" valign="top">4.82</td>
<td align="center" valign="top">1.24</td>
<td align="center" valign="top">5.96</td>
<td align="center" valign="top">0.78</td>
<td align="center" valign="top">4.20</td>
<td align="center" valign="top">0.87</td>
<td align="center" valign="top">4.99</td>
<td align="center" valign="top">0.88</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Attractiveness<xref ref-type="table-fn" rid="tfn1"><sup>a</sup></xref></td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.15</td>
<td align="center" valign="top">0.97</td>
<td align="center" valign="top">3.23</td>
<td align="center" valign="top">1.58</td>
<td align="center" valign="top">2.22</td>
<td align="center" valign="top">1.00</td>
<td align="center" valign="top">2.34</td>
<td align="center" valign="top">0.94</td>
<td align="center" valign="top">2.40</td>
<td align="center" valign="top">0.87</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.71</td>
<td align="center" valign="top">0.99</td>
<td align="center" valign="top">3.79</td>
<td align="center" valign="top">1.58</td>
<td align="center" valign="top">2.46</td>
<td align="center" valign="top">0.96</td>
<td align="center" valign="top">2.56</td>
<td align="center" valign="top">0.88</td>
<td align="center" valign="top">2.57</td>
<td align="center" valign="top">0.91</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.61</td>
<td align="center" valign="top">1.02</td>
<td align="center" valign="top">3.71</td>
<td align="center" valign="top">1.44</td>
<td align="center" valign="top">2.46</td>
<td align="center" valign="top">0.88</td>
<td align="center" valign="top">2.56</td>
<td align="center" valign="top">0.95</td>
<td align="center" valign="top">2.67</td>
<td align="center" valign="top">0.94</td>
</tr>
<tr>
<td align="left" valign="top">Middle</td>
<td align="center" valign="top" colspan="2"><italic>N</italic> =&#x2009;32</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
<td align="center" valign="top">Mean</td>
<td align="center" valign="top">SD</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Intensity</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.06</td>
<td align="center" valign="top">1.15</td>
<td align="center" valign="top">4.72</td>
<td align="center" valign="top">1.02</td>
<td align="center" valign="top">5.70</td>
<td align="center" valign="top">0.74</td>
<td align="center" valign="top">4.24</td>
<td align="center" valign="top">0.95</td>
<td align="center" valign="top">4.86</td>
<td align="center" valign="top">0.87</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">2.16</td>
<td align="center" valign="top">1.16</td>
<td align="center" valign="top">4.83</td>
<td align="center" valign="top">0.87</td>
<td align="center" valign="top">5.78</td>
<td align="center" valign="top">0.69</td>
<td align="center" valign="top">4.23</td>
<td align="center" valign="top">0.84</td>
<td align="center" valign="top">4.88</td>
<td align="center" valign="top">0.91</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.45</td>
<td align="center" valign="top">1.18</td>
<td align="center" valign="top">4.90</td>
<td align="center" valign="top">1.01</td>
<td align="center" valign="top">6.01</td>
<td align="center" valign="top">0.70</td>
<td align="center" valign="top">4.42</td>
<td align="center" valign="top">0.97</td>
<td align="center" valign="top">5.03</td>
<td align="center" valign="top">0.90</td>
</tr>
<tr>
<td align="left" valign="top" rowspan="3">Attractiveness</td>
<td align="left" valign="top" rowspan="3">Eye</td>
<td align="left" valign="top">Small</td>
<td align="center" valign="top">2.41</td>
<td align="center" valign="top">1.03</td>
<td align="center" valign="top">3.24</td>
<td align="center" valign="top">1.11</td>
<td align="center" valign="top">2.48</td>
<td align="center" valign="top">0.95</td>
<td align="center" valign="top">2.37</td>
<td align="center" valign="top">0.90</td>
<td align="center" valign="top">2.60</td>
<td align="center" valign="top">1.00</td>
</tr>
<tr>
<td align="left" valign="top">Unchanged</td>
<td align="center" valign="top">3.08</td>
<td align="center" valign="top">1.13</td>
<td align="center" valign="top">3.95</td>
<td align="center" valign="top">1.23</td>
<td align="center" valign="top">2.99</td>
<td align="center" valign="top">0.97</td>
<td align="center" valign="top">2.98</td>
<td align="center" valign="top">1.00</td>
<td align="center" valign="top">2.98</td>
<td align="center" valign="top">1.09</td>
</tr>
<tr>
<td align="left" valign="top">Large</td>
<td align="center" valign="top">2.98</td>
<td align="center" valign="top">0.98</td>
<td align="center" valign="top">3.83</td>
<td align="center" valign="top">1.00</td>
<td align="center" valign="top">2.77</td>
<td align="center" valign="top">0.99</td>
<td align="center" valign="top">2.83</td>
<td align="center" valign="top">1.05</td>
<td align="center" valign="top">2.95</td>
<td align="center" valign="top">1.04</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="tfn1"><label>a</label><p>The groups of age (young and middle) were defined based on the median (percentile 50) in the study.</p></fn>
<p>Young age: 18&#x2013;25&#x2009;years. Middle age: 26&#x2013;35&#x2009;years. SD, standard deviation.</p>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="sec15">
<label>3.2</label>
<title>Assessment of the interaction role of participants&#x2019; gender and age</title>
<p>The results of the initial ANOVAs assessing the interaction parameters between emotion and eye size with participants&#x2019; gender and age are displayed in <xref ref-type="table" rid="tab3">Table 3</xref>. The second order interactions (emotion &#x00D7; eye size &#x00D7; gender, and emotion &#x00D7; eye size &#x00D7; age) were excluded from the ANOVA procedures based on the results achieved by these parameters (not statistically significant [<italic>p</italic>&#x2009;&#x003E;&#x2009;0.05] and poor effect size [&#x03B7;<sub>p</sub><sup>2</sup>&#x2009;&#x003C;&#x2009;0.10]). The participants&#x2019; age was also excluded from the ANOVA because the first order interactions (emotion &#x00D7; age, and eye size &#x00D7; age) also obtained non-statistical significance or irrelevant effect size. The between-subjects factor of gender was retained in the ANOVA that was carried out for the intensity level measure, based on the observed statistically significant interaction eye size &#x00D7; gender.</p>
<table-wrap position="float" id="tab3">
<label>Table 3</label>
<caption><p>Assessment of the first and second order interactions with participants&#x2019; gender and age.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th/>
<th align="center" valign="top" colspan="3">Emotion &#x00D7; gender</th>
<th align="center" valign="top" colspan="3">Eye size &#x00D7; gender</th>
<th align="center" valign="top" colspan="3">Emotion &#x00D7; eye size &#x00D7; gender</th>
</tr>
<tr>
<th/>
<th align="center" valign="top"><italic>F</italic></th>
<th align="center" valign="top"><italic>p</italic></th>
<th align="center" valign="top">&#x03B7;<italic><sub>p</sub></italic><sup>2</sup></th>
<th align="center" valign="top"><italic>F</italic></th>
<th align="center" valign="top"><italic>p</italic></th>
<th align="center" valign="top">&#x03B7;<italic><sub>p</sub></italic><sup>2</sup></th>
<th align="center" valign="top"><italic>F</italic></th>
<th align="center" valign="top"><italic>p</italic></th>
<th align="center" valign="top">&#x03B7;<italic><sub>p</sub></italic><sup>2</sup></th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Intensity</td>
<td align="char" valign="top" char=".">0.776</td>
<td align="char" valign="top" char=".">0.493</td>
<td align="char" valign="top" char=".">0.013</td>
<td align="char" valign="top" char=".">5.417</td>
<td align="char" valign="top" char="."><bold>0.010&#x002A;</bold></td>
<td align="char" valign="top" char=".">0.085</td>
<td align="char" valign="top" char=".">0.940</td>
<td align="char" valign="top" char=".">0.464</td>
<td align="char" valign="top" char=".">0.016</td>
</tr>
<tr>
<td align="left" valign="top">Attractiveness</td>
<td align="char" valign="top" char=".">0.278</td>
<td align="char" valign="top" char=".">0.726</td>
<td align="char" valign="top" char=".">0.005</td>
<td align="char" valign="top" char=".">0.647</td>
<td align="char" valign="top" char=".">0.497</td>
<td align="char" valign="top" char=".">0.011</td>
<td align="char" valign="top" char=".">0.911</td>
<td align="char" valign="top" char=".">0.495</td>
<td align="char" valign="top" char=".">0.015</td>
</tr>
<tr>
<td/>
<td align="center" valign="top" colspan="3">Emotion &#x00D7; age</td>
<td align="center" valign="top" colspan="3">Eye size &#x00D7; age</td>
<td align="center" valign="top" colspan="3">Emotion &#x00D7; eye size &#x00D7; age</td>
</tr>
<tr>
<td align="left" valign="top">Intensity</td>
<td align="char" valign="top" char=".">0.213</td>
<td align="char" valign="top" char=".">0.871</td>
<td align="char" valign="top" char=".">0.003</td>
<td align="char" valign="top" char=".">0.625</td>
<td align="char" valign="top" char=".">0.501</td>
<td align="char" valign="top" char=".">0.010</td>
<td align="char" valign="top" char=".">0.779</td>
<td align="char" valign="top" char=".">0.586</td>
<td align="char" valign="top" char=".">0.013</td>
</tr>
<tr>
<td align="left" valign="top">Attractiveness</td>
<td align="char" valign="top" char=".">0.691</td>
<td align="char" valign="top" char=".">0.485</td>
<td align="char" valign="top" char=".">0.011</td>
<td align="char" valign="top" char=".">2.109</td>
<td align="char" valign="top" char=".">0.134</td>
<td align="char" valign="top" char=".">0.033</td>
<td align="char" valign="top" char=".">0.604</td>
<td align="char" valign="top" char=".">0.748</td>
<td align="char" valign="top" char=".">0.010</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<p>&#x03B7;<italic><sub>p</sub></italic><sup>2</sup>: partial eta squared. &#x002A;Bold: significant parameter.</p>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="sec16">
<label>3.3</label>
<title>Association between eye size and intensity</title>
<p>The ANOVA performed for intensity level scores included eye size (small, unchanged, and large) and emotion type (neutral, happiness, fear, sadness, and anger) as within-subjects factors, and participants&#x2019; gender (female, and male) as between-subjects factors. The complete results of this model are displayed in <xref rid="SM1" ref-type="supplementary-material">Supplementary Table S2</xref>.</p>
<p>The multivariate tests obtained non-statistical significance and an irrelevant effect size for the interaction emotion &#x00D7; eye size &#x00D7; gender (<italic>F</italic>&#x2009;=&#x2009;0.94, <italic>p</italic>&#x2009;=&#x2009;0.464, &#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;=&#x2009;0.016), emotion &#x00D7; eye size (<italic>F</italic>&#x2009;=&#x2009;0.66, <italic>p</italic>&#x2009;=&#x2009;0.680, &#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;=&#x2009;0.011) and emotion &#x00D7; gender (<italic>F</italic>&#x2009;=&#x2009;0.78, <italic>p</italic>&#x2009;=&#x2009;0.493, &#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;=&#x2009;0.013). Statistically significant results were achieved for the interaction parameter eye size &#x00D7; gender (<italic>F</italic>&#x2009;=&#x2009;5.42, <italic>p</italic>&#x2009;=&#x2009;0.010, &#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;=&#x2009;0.085). Based on these results, single effects were estimated for the eye size factor (that is, pairwise comparisons between small, unchanged, and large eye sizes were obtained, separately for female and male participants), and main effects were estimated for the emotion type factor.</p>
<p><xref ref-type="fig" rid="fig3">Figure 3A</xref> displays the graphic visualization of the interaction between eye size and participants&#x2019; gender. Among female individuals, differences were observed comparing unchanged eyes with larger eye size faces (<italic>p</italic>&#x2009;=&#x2009;0.026). Among male participants, differences were identified across all comparisons (small versus unchanged: <italic>p</italic>&#x2009;=&#x2009;0.026; small versus large: <italic>p</italic>&#x2009;&#x003C;&#x2009;0.001; unchanged versus large: <italic>p</italic>&#x2009;=&#x2009;0.001). It is worth noting that the comparison between genders suggested that male (compared to female) subjects tended to report higher intensity levels for unchanged and large eye size faces.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption><p>Visualization of the eye size effects on the intensity and the attractiveness levels. <bold>(A)</bold> Means for the intensity levels (Y-axis) based on the eye size (X-axis), stratified by the participants&#x2019; gender. <bold>(B)</bold> Means for the intensity levels (Y-axis) based on the emotion type (X-axis). <bold>(C)</bold> Means for the attractiveness levels (Y-axis) based on the eye size (X-axis), stratified by the emotion type. <bold>(D)</bold> Means for the attractiveness levels (Y-axis) based on the emotion type (X-axis), stratified by the eye size.</p></caption>
<graphic xlink:href="fpsyg-15-1421707-g003.tif"/>
</fig>
<p>Regarding the differences for intensity levels based on emotion type (<xref ref-type="fig" rid="fig3">Figure 3B</xref>), the lowest intensity mean score was associated with neutral faces and the highest with fearful ones. The contrasts between emotion types showed relevant differences across all pairwise comparisons (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.001), except for the difference between happy and angry faces (<italic>p</italic>&#x2009;=&#x2009;0.396).</p>
</sec>
<sec id="sec17">
<label>3.4</label>
<title>Association between eye size and attractiveness level</title>
<p>The ANOVA obtained for attractiveness level included eye size (small, unchanged, and large) and emotion type (neutral, happiness, fear, sadness, and anger) as within-subjects factors. The complete results of this model are displayed in <xref rid="SM1" ref-type="supplementary-material">Supplementary Table S3</xref>.</p>
<p>Since the interaction parameter eye size &#x00D7; emotion reached statistically significant results (<italic>F</italic>&#x2009;=&#x2009;4.17, <italic>p</italic>&#x2009;=&#x2009;0.001, &#x03B7;<italic><sub>p</sub></italic><sup>2</sup>&#x2009;=&#x2009;0.063), single effects were estimated and interpreted to assess the differences based on the eye size and the emotion type. <xref ref-type="fig" rid="fig3">Figure 3C</xref> displays the graphic visualization of this interaction effect. Regarding the eye size factor, all the pairwise comparisons between small eye size faces and both unchanged and large ones reached relevant differences (<italic>p</italic>&#x2009;&#x003C;&#x2009;0.05), independently of the emotion. The mean comparisons between unchanged and large eye size faces showed no differences (independently of the emotion). For the emotion type factor, the highest attractiveness level was associated with happy faces across all eye size comparisons (<xref ref-type="fig" rid="fig3">Figure 3D</xref>).</p>
</sec>
</sec>
<sec sec-type="discussion" id="sec18">
<label>4</label>
<title>Discussion</title>
<p>The aim of this study was to obtain empirical evidence regarding the association between eye size and the perceived intensity and attractiveness of different emotions, in an experimental task based on the visualization of female faces. The comparison of the intensity levels showed an interaction with participants&#x2019; gender: within the male participants, differences were found across all eye size comparisons. Within the female group, differences were only found when comparing the unchanged and larger eyes. No interaction was found between gender and emotion type. The emotion perceived as the most intense was fear, and neutral faces were rated with the lowest intensity. No interaction was found when comparing age by emotion and age by eye size. Regarding the perception of attractiveness, no significant interactions were found with either age or gender. The lowest mean was associated with the smaller eyes, with no differences between the unchanged and larger eyes. The most attractive emotion was happiness.</p>
<p>The results of this study are based solely on the visualization of images of female faces. Therefore, they should not be considered representative of or generalizable to both men and women.</p>
<p>Neutral expressions were expected to be perceived as the least intense (<xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>), and fearful ones the most intense (<xref ref-type="bibr" rid="ref16">Ekman and Friesen, 1975</xref>; <xref ref-type="bibr" rid="ref5">Becker, 2012</xref>; <xref ref-type="bibr" rid="ref53">&#x017B;urowska et al., 2018</xref>). Happy and angry faces were expected to be equally intense, and sadness more intense than neutral, but less than the other three (<xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>; <xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>). The results of this study support these findings, and outline the premise that codification of face expressions is related to eye size. Moreover, participants&#x2019; gender reached a moderator role in the relationships.</p>
<p>The results of our study also suggest that attractiveness ratings are not dependent on the participants&#x2019; age or gender. However, differences have been found associated with the emotion type. We hypothesized that happy expressions should be rated as more attractive than all the others, and neutral more attractive than fearful, sad and angry (<xref ref-type="bibr" rid="ref21">Golle et al., 2013</xref>; <xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>; <xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>). Some of these differences have been observed. Happy faces were more attractive than all the rest. Neutral, fear, sadness and anger were not rated as different from each other except for the comparison neutral-fear with large eyes, and neutral-anger with small eyes. In regard to attractiveness and eye size, bigger eyes were expected to be perceived as more attractive than smaller eyes (<xref ref-type="bibr" rid="ref20">Glocker et al., 2009</xref>; <xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>). In particular, it was expected for large eyes to be rated as more attractive than unchanged and small. A difference was also anticipated between unchanged and smaller eyes, as previously found by <xref ref-type="bibr" rid="ref20">Glocker et al. (2009)</xref>. A main effect of eye size on attractiveness was replicated, but the difference was not found where it was expected: participants found smaller eyes to be less attractive than unchanged and larger; but there was no difference between unchanged and larger.</p>
<p>Overall, the results of this study suggest that emotions are perceived more intensely when the eye size is digitally increased in women&#x2019;s faces, and that these changes are gender-dependent, men being more susceptible to changes than women. Noticeably, neutral faces with smaller eyes were rated as less intense than those with larger and unchanged eyes (as was the case for the rest of the emotions). This phenomenon&#x2014;&#x201C;more intense neutrality&#x201D;&#x2014;may suggest that larger eyes not only intensify emotions, but also increase the perceived intensity of the face in its entirety, which supports the theory that emotions are, at least partially, holistically perceived (<xref ref-type="bibr" rid="ref37">Omigbodun and Cottrell, 2013</xref>), as bigger eyes intensify the message projected by the rest of the face.</p>
<p><xref ref-type="bibr" rid="ref20">Glocker et al. (2009)</xref> suggested that larger eyes in babies lead to a higher perception of attractiveness, which motivates caretaking behavior, thus increasing the child&#x2019;s chances of survival. The phenomenon &#x201C;the bigger the eyes, the more intense the emotion&#x201D; supports the theory that emotions are perceived as more intense due to evolutionary mechanisms: a sad baby with bigger eyes might seem sadder, which might prompt caregivers to take care of them more urgently. Both <xref ref-type="bibr" rid="ref20">Glocker et al. (2009)</xref>&#x2019;s attractiveness-caretaking behavior theory, and the theory &#x201C;the bigger the eyes, the more intense the emotion&#x201D; support the idea that bigger eyes increase the chances of survival by increasing both the intensity of the emotion on display, and the attractiveness of the face. This phenomenon is also applicable in adulthood, as bigger eyes also make adult faces gain in attractiveness (<xref ref-type="bibr" rid="ref6">Berry and McArthur, 1985</xref>; <xref ref-type="bibr" rid="ref47">Thornhill and Gangestad, 1999</xref>; <xref ref-type="bibr" rid="ref34">Matsushita et al., 2015b</xref>; <xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>) and intensity, as seen in this study.</p>
<p>The results of the present study support the premise that eye size has an impact on attractiveness: faces with larger eyes are generally perceived as more attractive than those with smaller eyes (<xref ref-type="bibr" rid="ref20">Glocker et al., 2009</xref>; <xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>). However, our data also indicated that bigger than normal eyes do not enhance attractiveness any more than unchanged eyes. This is a novel finding, as previous literature has repeatedly suggested that bigger eyes are perceived as more attractive, especially in happy faces (<xref ref-type="bibr" rid="ref6">Berry and McArthur, 1985</xref>; <xref ref-type="bibr" rid="ref20">Glocker et al., 2009</xref>; <xref ref-type="bibr" rid="ref34">Matsushita et al., 2015b</xref>; <xref ref-type="bibr" rid="ref49">Ueda et al., 2016</xref>; <xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>; <xref ref-type="bibr" rid="ref25">Hine and Okubo, 2021</xref>). In any case, our results should be interpreted considering that our research tested increase and decrease changes of 15%, while other experiments have tested smaller changes in the 5% or 10% range (<xref ref-type="bibr" rid="ref40">Przylipiak et al., 2018</xref>). The modifications used in our study might have altered the facial expression too much, making the images reach the uncanny valley, resulting in faces that were perceived as uncomfortable (missing any chance of gaining in attractiveness).</p>
</sec>
<sec id="sec19">
<label>5</label>
<title>Limitations</title>
<p>The results of this work should be interpreted in accordance with certain limitations. First, the asymmetrical distribution of the participants&#x2019; gender (the proportion of male individuals was lower than female individuals) should be considered for generalization purposes.</p>
<p>Second, the female faces visualized in the experimental task showed characteristics in addition to the face area, such as hair, and therefore differences in attributes such as hairstyles could have affected attractiveness ratings due to individual preferences. Furthermore, the variety of faces in the experimental task was relatively low, which limited the possibility of including intra-ethnical differences. However, it must be considered that the high number of experimental conditions in our study (three eye size values multiplied by five emotion types) did not allow a greater number of images, otherwise the experiment would have been too long and tiring for the participants.</p>
<p>In relation to the previous point, the study was carried out using images of female faces. Other replication studies including male faces are necessary for generalization purposes, mostly because females are usually perceived as more attractive and/or intense than males (<xref ref-type="bibr" rid="ref19">Garrido and Prada, 2017</xref>). Lastly, additional research that explores a wider range of emotions (including other primary emotions such as surprise or contempt) is necessary to further understand the role of eye size in the interpretation of facial expressions.</p>
</sec>
<sec sec-type="conclusions" id="sec20">
<label>6</label>
<title>Conclusion</title>
<p>To our knowledge, this is the first study addressing the combined impact of eye size and emotions on the perceived intensity and attractiveness of facial expressions, considering the potential role of the participants&#x2019; gender and age. The results indicate that the eyes act as intensifiers of emotions, particularly among male respondents, and that facial emotional expressions are, at least partially, holistically perceived. The results of this work also show that the attractiveness ratings were affected by eye size, with smaller eyes perceived as less attractive, independently of the emotion. This evidence could be interpreted as an evolutionary phenomenon.</p>
</sec>
<sec sec-type="data-availability" id="sec21">
<title>Data availability statement</title>
<p>The datasets presented in this article are not readily available because restrictions apply to the datasets: limitations to making data publicly available include participant privacy, in accordance with the ethical consent provided by participants on the use of confidential/identifiable human data. Requests to access the datasets should be directed to AEJ, <email>Alanis.Este@autonoma.cat</email>.</p>
</sec>
<sec sec-type="ethics-statement" id="sec22">
<title>Ethics statement</title>
<p>The studies involving humans were approved by Ethics board of the Department of Psychological Sciences, Birkbeck College, University of London (approval number: 2122068, date of approval: 09/05/2022). The studies were conducted in accordance with the local legislation and institutional requirements. Participants provided informed consent by ticking a box within the website before starting the experiment. Written informed consent was obtained from the individual(s) for the publication of any identifiable images or data included in this article.</p>
</sec>
<sec sec-type="author-contributions" id="sec23">
<title>Author contributions</title>
<p>AEJ: Conceptualization, Data curation, Formal analysis, Investigation, Methodology, Project administration, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing. RG: Formal analysis, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing.</p>
</sec>
</body>
<back>
<sec sec-type="funding-information" id="sec24">
<title>Funding</title>
<p>The author(s) declare that no financial support was received for the research, authorship, and/or publication of this article.</p>
</sec>
<ack>
<p>The authors would like to thank Dr. Marie Smith, Ruben Zamora, and Jos&#x00E9; Tom&#x00E1;s Est&#x00E9; for their invaluable guidance and support throughout this research. RG was supported by The Catalan Institution for Research and Advanced Studies (ICREA Academia Program, 2021).</p>
</ack>
<sec sec-type="COI-statement" id="sec25">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="sec26">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<sec sec-type="supplementary-material" id="sec27">
<title>Supplementary material</title>
<p>The Supplementary material for this article can be found online at: <ext-link xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2024.1421707/full#supplementary-material" ext-link-type="uri">https://www.frontiersin.org/articles/10.3389/fpsyg.2024.1421707/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Table_1.docx" id="SM1" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document" xmlns:xlink="http://www.w3.org/1999/xlink"/>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aday</surname> <given-names>J. S.</given-names></name> <name><surname>Fang</surname> <given-names>L.</given-names></name> <name><surname>Carlson</surname> <given-names>J. M.</given-names></name></person-group> (<year>2023</year>). <article-title>Eye-size effects in the dot-probe task: greater sclera exposure predicts delayed disengagement from fearful faces</article-title>. <source>PLoS One</source> <volume>18</volume>:<fpage>e0285839</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0285839</pub-id>, PMID: <pub-id pub-id-type="pmid">37195990</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Baron-Cohen</surname> <given-names>S.</given-names></name> <name><surname>Wheelwright</surname> <given-names>S.</given-names></name> <name><surname>Hill</surname> <given-names>J.</given-names></name> <name><surname>Raste</surname> <given-names>Y.</given-names></name> <name><surname>Plumb</surname> <given-names>I.</given-names></name></person-group> (<year>2001</year>). <article-title>The &#x201C;Reading the mind in the eyes&#x201D; test revised version: a study with Normal adults, and adults with Asperger syndrome or high-functioning autism</article-title>. <source>J. Child Psychol. Psychiatry</source> <volume>42</volume>, <fpage>241</fpage>&#x2013;<lpage>251</lpage>. doi: <pub-id pub-id-type="doi">10.1111/1469-7610.00715</pub-id></citation></ref>
<ref id="ref3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Barrett</surname> <given-names>L. F.</given-names></name></person-group> (<year>2018</year>). <article-title>Seeing fear: It&#x2019;s all in the eyes?</article-title> <source>Trends Neurosci.</source> <volume>41</volume>, <fpage>559</fpage>&#x2013;<lpage>563</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.tins.2018.06.009</pub-id>, PMID: <pub-id pub-id-type="pmid">30143181</pub-id></citation></ref>
<ref id="ref4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bartlett</surname> <given-names>J. C.</given-names></name> <name><surname>Searcy</surname> <given-names>J.</given-names></name></person-group> (<year>1993</year>). <article-title>Inversion and configuration of faces</article-title>. <source>Cogn. Psychol.</source> <volume>25</volume>, <fpage>281</fpage>&#x2013;<lpage>316</lpage>. doi: <pub-id pub-id-type="doi">10.1006/cogp.1993.1007</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Becker</surname> <given-names>M. W.</given-names></name></person-group> (<year>2012</year>). <article-title>Negative emotional photographs are identified more slowly than positive photographs</article-title>. <source>Atten. Percept. Psychophys.</source> <volume>74</volume>, <fpage>1241</fpage>&#x2013;<lpage>1251</lpage>. doi: <pub-id pub-id-type="doi">10.3758/s13414-012-0308-x</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Berry</surname> <given-names>D. S.</given-names></name> <name><surname>McArthur</surname> <given-names>L. Z.</given-names></name></person-group> (<year>1985</year>). <article-title>Some components and consequences of a babyface</article-title>. <source>J. Pers. Soc. Psychol.</source> <volume>48</volume>, <fpage>312</fpage>&#x2013;<lpage>323</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0022-3514.48.2.312</pub-id></citation></ref>
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Blais</surname> <given-names>C.</given-names></name> <name><surname>Roy</surname> <given-names>C.</given-names></name> <name><surname>Fiset</surname> <given-names>D.</given-names></name> <name><surname>Arguin</surname> <given-names>M.</given-names></name> <name><surname>Gosselin</surname> <given-names>F.</given-names></name></person-group> (<year>2012</year>). <article-title>The eyes are not the window to basic emotions</article-title>. <source>Neuropsychologia</source> <volume>50</volume>, <fpage>2830</fpage>&#x2013;<lpage>2838</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2012.08.010</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Calvo</surname> <given-names>M. G.</given-names></name> <name><surname>Guti&#x00E9;rrez-Garc&#x00ED;a</surname> <given-names>A.</given-names></name> <name><surname>Del L&#x00ED;bano</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>What makes a smiling face look happy? Visual saliency, distinctiveness, and affect</article-title>. <source>Psychol. Res.</source> <volume>82</volume>, <fpage>296</fpage>&#x2013;<lpage>309</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s00426-016-0829-3</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carbon</surname> <given-names>C.-C.</given-names></name></person-group> (<year>2020</year>). <article-title>Wearing face masks strongly confuses counterparts in reading emotions</article-title>. <source>Front. Psychol.</source> <volume>11</volume>:<fpage>566886</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2020.566886</pub-id>, PMID: <pub-id pub-id-type="pmid">33101135</pub-id></citation></ref>
<ref id="ref10"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carlson</surname> <given-names>J. M.</given-names></name> <name><surname>Aday</surname> <given-names>J.</given-names></name></person-group> (<year>2018</year>). <article-title>In the presence of conflicting gaze cues, fearful expression and eye-size guide attention</article-title>. <source>Cognit. Emot.</source> <volume>32</volume>, <fpage>1178</fpage>&#x2013;<lpage>1188</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2017.1391065</pub-id>, PMID: <pub-id pub-id-type="pmid">29047315</pub-id></citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cheetham</surname> <given-names>M.</given-names></name></person-group> (<year>2011</year>). <article-title>The human likeness dimension of the &#x201C;uncanny valley hypothesis&#x201D;: behavioral and functional MRI findings</article-title>. <source>Front. Hum. Neurosci.</source> <volume>5</volume>:<fpage>126</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fnhum.2011.00126</pub-id>, PMID: <pub-id pub-id-type="pmid">22131970</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Conley</surname> <given-names>M. I.</given-names></name> <name><surname>Dellarco</surname> <given-names>D. V.</given-names></name> <name><surname>Rubien-Thomas</surname> <given-names>E.</given-names></name> <name><surname>Cohen</surname> <given-names>A. O.</given-names></name> <name><surname>Cervera</surname> <given-names>A.</given-names></name> <name><surname>Tottenham</surname> <given-names>N.</given-names></name> <etal/></person-group>. (<year>2018</year>). <article-title>The racially diverse affective expression (RADIATE) face stimulus set</article-title>. <source>Psychiatry Res.</source> <volume>270</volume>, <fpage>1059</fpage>&#x2013;<lpage>1067</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.psychres.2018.04.066</pub-id>, PMID: <pub-id pub-id-type="pmid">29910020</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Curby</surname> <given-names>K. M.</given-names></name> <name><surname>Johnson</surname> <given-names>K. J.</given-names></name> <name><surname>Tyson</surname> <given-names>A.</given-names></name></person-group> (<year>2012</year>). <article-title>Face to face with emotion: holistic face processing is modulated by emotional state</article-title>. <source>Cognit. Emot.</source> <volume>26</volume>, <fpage>93</fpage>&#x2013;<lpage>102</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2011.555752</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dunlap</surname> <given-names>K.</given-names></name></person-group> (<year>1927</year>). <article-title>The role of eye-muscles and mouth-muscles in the expression of the emotions</article-title>. <source>Genet. Psychol. Monogr.</source> <volume>2</volume>, <fpage>196</fpage>&#x2013;<lpage>233</lpage>.</citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eisenbarth</surname> <given-names>H.</given-names></name> <name><surname>Alpers</surname> <given-names>G. W.</given-names></name></person-group> (<year>2011</year>). <article-title>Happy mouth and sad eyes: scanning emotional facial expressions</article-title>. <source>Emotion</source> <volume>11</volume>, <fpage>860</fpage>&#x2013;<lpage>865</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0022758</pub-id>, PMID: <pub-id pub-id-type="pmid">21859204</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>P.</given-names></name> <name><surname>Friesen</surname> <given-names>W. V.</given-names></name></person-group> (<year>1975</year>). <source>Unmasking the face. A guide to recognizing emotions from facial clues</source>. <publisher-loc>Hoboken, New Jersey, United States</publisher-loc>: <publisher-name>Prentice-Hall, Cop</publisher-name>.</citation></ref>
<ref id="ref17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ekman</surname> <given-names>P.</given-names></name> <name><surname>Sorenson</surname> <given-names>E. R.</given-names></name> <name><surname>Friesen</surname> <given-names>W. V.</given-names></name></person-group> (<year>1969</year>). <article-title>Pan-cultural elements in facial displays of emotion</article-title>. <source>Science</source> <volume>164</volume>, <fpage>86</fpage>&#x2013;<lpage>88</lpage>. doi: <pub-id pub-id-type="doi">10.1126/science.164.3875.86</pub-id>, PMID: <pub-id pub-id-type="pmid">5773719</pub-id></citation></ref>
<ref id="ref18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fern&#x00E1;ndez-Mart&#x00ED;n</surname> <given-names>A.</given-names></name> <name><surname>Guti&#x00E9;rrez-Garc&#x00ED;a</surname> <given-names>A.</given-names></name> <name><surname>Calvo</surname> <given-names>M. G.</given-names></name></person-group> (<year>2013</year>). <article-title>A smile radiates outwards and biases the eye expression</article-title>. <source>Span. J. Psychol.</source> <volume>16</volume>:<fpage>E53</fpage>. doi: <pub-id pub-id-type="doi">10.1017/sjp.2013.68</pub-id></citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Garrido</surname> <given-names>M. V.</given-names></name> <name><surname>Prada</surname> <given-names>M.</given-names></name></person-group> (<year>2017</year>). <article-title>KDEF-PT: valence, emotional intensity, familiarity and attractiveness ratings of angry, neutral, and happy faces</article-title>. <source>Front. Psychol.</source> <volume>8</volume>:<fpage>2181</fpage>. doi: <pub-id pub-id-type="doi">10.3389/fpsyg.2017.02181</pub-id>, PMID: <pub-id pub-id-type="pmid">29312053</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Glocker</surname> <given-names>M. L.</given-names></name> <name><surname>Langleben</surname> <given-names>D. D.</given-names></name> <name><surname>Ruparel</surname> <given-names>K.</given-names></name> <name><surname>Loughead</surname> <given-names>J. W.</given-names></name> <name><surname>Valdez</surname> <given-names>J. N.</given-names></name> <name><surname>Griffin</surname> <given-names>M. D.</given-names></name> <etal/></person-group>. (<year>2009</year>). <article-title>Baby schema modulates the brain reward system in nulliparous women</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>106</volume>, <fpage>9115</fpage>&#x2013;<lpage>9119</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.0811620106</pub-id>, PMID: <pub-id pub-id-type="pmid">19451625</pub-id></citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Golle</surname> <given-names>J.</given-names></name> <name><surname>Mast</surname> <given-names>F. W.</given-names></name> <name><surname>Lobmaier</surname> <given-names>J. S.</given-names></name></person-group> (<year>2013</year>). <article-title>Something to smile about: the interrelationship between attractiveness and emotional expression</article-title>. <source>Cognit. Emot.</source> <volume>28</volume>, <fpage>298</fpage>&#x2013;<lpage>310</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2013.817383</pub-id>, PMID: <pub-id pub-id-type="pmid">23875865</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Gong</surname> <given-names>X.</given-names></name> <name><surname>Wong</surname> <given-names>N.</given-names></name> <name><surname>Wang</surname> <given-names>D.</given-names></name></person-group> (<year>2018</year>). <article-title>Are gender differences in emotion culturally universal? Comparison of emotional intensity between Chinese and German samples</article-title>. <source>J. Cross-Cult. Psychol.</source> <volume>49</volume>, <fpage>993</fpage>&#x2013;<lpage>1005</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0022022118768434</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grammer</surname> <given-names>K.</given-names></name> <name><surname>Thornhill</surname> <given-names>R.</given-names></name></person-group> (<year>1994</year>). <article-title>Human (Homo sapiens) facial attractiveness and sexual selection: the role of symmetry and averageness</article-title>. <source>J. Comp. Psychol.</source> <volume>108</volume>, <fpage>233</fpage>&#x2013;<lpage>242</lpage>. doi: <pub-id pub-id-type="doi">10.1037/0735-7036.108.3.233</pub-id>, PMID: <pub-id pub-id-type="pmid">7924253</pub-id></citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hama</surname> <given-names>T.</given-names></name> <name><surname>Koeda</surname> <given-names>M.</given-names></name></person-group> (<year>2023</year>). <article-title>Characteristics of healthy Japanese young adults with respect to recognition of facial expressions: a preliminary study</article-title>. <source>BMC Psychol.</source> <volume>11</volume>:<fpage>237</fpage>. doi: <pub-id pub-id-type="doi">10.1186/s40359-023-01281-5</pub-id>, PMID: <pub-id pub-id-type="pmid">37592360</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hine</surname> <given-names>K.</given-names></name> <name><surname>Okubo</surname> <given-names>H.</given-names></name></person-group> (<year>2021</year>). <article-title>Overestimation of eye size: people see themselves with bigger eyes in a holistic approach</article-title>. <source>Acta Psychol.</source> <volume>220</volume>:<fpage>103419</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.actpsy.2021.103419</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jack</surname> <given-names>R. E.</given-names></name> <name><surname>Garrod</surname> <given-names>O. G. B.</given-names></name> <name><surname>Yu</surname> <given-names>H.</given-names></name> <name><surname>Caldara</surname> <given-names>R.</given-names></name> <name><surname>Schyns</surname> <given-names>P. G.</given-names></name></person-group> (<year>2012</year>). <article-title>Facial expressions of emotion are not culturally universal</article-title>. <source>Proc. Natl. Acad. Sci.</source> <volume>109</volume>, <fpage>7241</fpage>&#x2013;<lpage>7244</lpage>. doi: <pub-id pub-id-type="doi">10.1073/pnas.1200155109</pub-id>, PMID: <pub-id pub-id-type="pmid">22509011</pub-id></citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kim</surname> <given-names>G.</given-names></name> <name><surname>Seong</surname> <given-names>S. H.</given-names></name> <name><surname>Hong</surname> <given-names>S.-S.</given-names></name> <name><surname>Choi</surname> <given-names>E.</given-names></name></person-group> (<year>2022</year>). <article-title>Impact of face masks and sunglasses on emotion recognition in South Koreans</article-title>. <source>PLoS One</source> <volume>17</volume>:<fpage>e0263466</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0263466</pub-id>, PMID: <pub-id pub-id-type="pmid">35113970</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kranz</surname> <given-names>F.</given-names></name> <name><surname>Ishai</surname> <given-names>A.</given-names></name></person-group> (<year>2006</year>). <article-title>Face perception is modulated by sexual preference</article-title>. <source>Curr. Biol.</source> <volume>16</volume>, <fpage>63</fpage>&#x2013;<lpage>68</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.cub.2005.10.070</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Langbehn</surname> <given-names>A. T.</given-names></name> <name><surname>Yermol</surname> <given-names>D. A.</given-names></name> <name><surname>Zhao</surname> <given-names>F.</given-names></name> <name><surname>Thorstenson</surname> <given-names>C. A.</given-names></name> <name><surname>Niedenthal</surname> <given-names>P. M.</given-names></name></person-group> (<year>2022</year>). <article-title>Wearing N95, surgical, and cloth face masks compromises the perception of emotion</article-title>. <source>Affect. Sci.</source> <volume>3</volume>, <fpage>105</fpage>&#x2013;<lpage>117</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s42761-021-00097-z</pub-id>, PMID: <pub-id pub-id-type="pmid">35098149</pub-id></citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>D. H.</given-names></name> <name><surname>Anderson</surname> <given-names>A. K.</given-names></name></person-group> (<year>2017</year>). <article-title>Reading what the mind thinks from how the eye sees</article-title>. <source>Psychol. Sci.</source> <volume>28</volume>, <fpage>494</fpage>&#x2013;<lpage>503</lpage>. doi: <pub-id pub-id-type="doi">10.1177/0956797616687364</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Levine</surname> <given-names>T. R.</given-names></name> <name><surname>Hullet</surname> <given-names>C. R.</given-names></name></person-group> (<year>2002</year>). <article-title>Eta squared, partial eta squared, and misreporting of effect size in communication research</article-title>. <source>Hum. Commun. Res.</source> <volume>28</volume>, <fpage>612</fpage>&#x2013;<lpage>625</lpage>. doi: <pub-id pub-id-type="doi">10.1111/j.1468-2958.2002.tb00828.x</pub-id></citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Levy</surname> <given-names>B.</given-names></name> <name><surname>Ariely</surname> <given-names>D.</given-names></name> <name><surname>Mazar</surname> <given-names>N.</given-names></name> <name><surname>Chi</surname> <given-names>W.</given-names></name> <name><surname>Lukas</surname> <given-names>S.</given-names></name> <name><surname>Elman</surname> <given-names>I.</given-names></name></person-group> (<year>2008</year>). <article-title>Gender differences in the motivational processing of facial beauty</article-title>. <source>Learn. Motiv.</source> <volume>39</volume>, <fpage>136</fpage>&#x2013;<lpage>145</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.lmot.2007.09.002</pub-id>, PMID: <pub-id pub-id-type="pmid">24282336</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Matsushita</surname> <given-names>S.</given-names></name> <name><surname>Morikawa</surname> <given-names>K.</given-names></name> <name><surname>Mitsuzane</surname> <given-names>S.</given-names></name> <name><surname>Yamanami</surname> <given-names>H.</given-names></name></person-group> (<year>2015a</year>). <article-title>Eye shape illusions induced by eyebrow positions</article-title>. <source>Perception</source> <volume>44</volume>, <fpage>529</fpage>&#x2013;<lpage>540</lpage>. doi: <pub-id pub-id-type="doi">10.1068/p7823</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Matsushita</surname> <given-names>S.</given-names></name> <name><surname>Morikawa</surname> <given-names>K.</given-names></name> <name><surname>Yamanami</surname> <given-names>H.</given-names></name></person-group> (<year>2015b</year>). <article-title>Measurement of eye size illusion caused by eyeliner, mascara, and eye shadow</article-title>. <source>J. Cosmet. Sci.</source> <volume>66</volume>, <fpage>161</fpage>&#x2013;<lpage>174</lpage>. PMID: <pub-id pub-id-type="pmid">26454904</pub-id></citation></ref>
<ref id="ref35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maurer</surname> <given-names>D.</given-names></name> <name><surname>Grand</surname> <given-names>R. L.</given-names></name> <name><surname>Mondloch</surname> <given-names>C. J.</given-names></name></person-group> (<year>2002</year>). <article-title>The many faces of configural processing</article-title>. <source>Trends Cogn. Sci.</source> <volume>6</volume>, <fpage>255</fpage>&#x2013;<lpage>260</lpage>. doi: <pub-id pub-id-type="doi">10.1016/s1364-6613(02)01903-4</pub-id>, PMID: <pub-id pub-id-type="pmid">12039607</pub-id></citation></ref>
<ref id="ref36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mori</surname> <given-names>M.</given-names></name> <name><surname>MacDorman</surname> <given-names>K.</given-names></name> <name><surname>Kageki</surname> <given-names>N.</given-names></name></person-group> (<year>2012</year>). <article-title>The Uncanny Valley [from the field]</article-title>. <source>IEEE Robot. Automat. Mag.</source> <volume>19</volume>, <fpage>98</fpage>&#x2013;<lpage>100</lpage>. doi: <pub-id pub-id-type="doi">10.1109/mra.2012.2192811</pub-id></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Omigbodun</surname> <given-names>A.</given-names></name> <name><surname>Cottrell</surname> <given-names>G.</given-names></name></person-group> (<year>2013</year>). <article-title>Evidence for holistic facial expression processing with a Neurocomputational model</article-title>. <source>J. Vis.</source> <volume>13</volume>:<fpage>99</fpage>. doi: <pub-id pub-id-type="doi">10.1167/13.9.99</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oosterhof</surname> <given-names>N. N.</given-names></name> <name><surname>Todorov</surname> <given-names>A.</given-names></name></person-group> (<year>2009</year>). <article-title>Shared perceptual basis of emotional expressions and trustworthiness impressions from faces</article-title>. <source>Emotion</source> <volume>9</volume>, <fpage>128</fpage>&#x2013;<lpage>133</lpage>. doi: <pub-id pub-id-type="doi">10.1037/a0014520</pub-id>, PMID: <pub-id pub-id-type="pmid">19186926</pub-id></citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Przylipiak</surname> <given-names>M.</given-names></name> <name><surname>Przylipiak</surname> <given-names>J.</given-names></name> <name><surname>Terlikowski</surname> <given-names>R.</given-names></name> <name><surname>Lubowicka</surname> <given-names>E.</given-names></name> <name><surname>Chrostek</surname> <given-names>L.</given-names></name> <name><surname>Przylipiak</surname> <given-names>A.</given-names></name></person-group> (<year>2018</year>). <article-title>Impact of face proportions on face attractiveness</article-title>. <source>J. Cosmet. Dermatol.</source> <volume>17</volume>, <fpage>954</fpage>&#x2013;<lpage>959</lpage>. doi: <pub-id pub-id-type="doi">10.1111/jocd.12783</pub-id>, PMID: <pub-id pub-id-type="pmid">30291673</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Reis</surname> <given-names>H. T.</given-names></name> <name><surname>Wilson</surname> <given-names>I. M.</given-names></name> <name><surname>Monestere</surname> <given-names>C.</given-names></name> <name><surname>Bernstein</surname> <given-names>S.</given-names></name> <name><surname>Clark</surname> <given-names>K.</given-names></name> <name><surname>Seidl</surname> <given-names>E.</given-names></name> <etal/></person-group>. (<year>1990</year>). <article-title>What is smiling is beautiful and good</article-title>. <source>Eur. J. Soc. Psychol.</source> <volume>20</volume>, <fpage>259</fpage>&#x2013;<lpage>267</lpage>. doi: <pub-id pub-id-type="doi">10.1002/ejsp.2420200307</pub-id></citation></ref>
<ref id="ref42"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Royer</surname> <given-names>J.</given-names></name> <name><surname>Blais</surname> <given-names>C.</given-names></name> <name><surname>Barnab&#x00E9;-Lortie</surname> <given-names>V.</given-names></name> <name><surname>Carr&#x00E9;</surname> <given-names>M.</given-names></name> <name><surname>Leclerc</surname> <given-names>J.</given-names></name> <name><surname>Fiset</surname> <given-names>D.</given-names></name></person-group> (<year>2016</year>). <article-title>Efficient visual information for unfamiliar face matching despite viewpoint variations: It&#x2019;s not in the eyes!</article-title> <source>Vis. Res.</source> <volume>123</volume>, <fpage>33</fpage>&#x2013;<lpage>40</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.visres.2016.04.004</pub-id>, PMID: <pub-id pub-id-type="pmid">27179558</pub-id></citation></ref>
<ref id="ref43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scheib</surname> <given-names>J. E.</given-names></name> <name><surname>Gangestad</surname> <given-names>S. W.</given-names></name> <name><surname>Thornhill</surname> <given-names>R.</given-names></name></person-group> (<year>1999</year>). <article-title>Facial attractiveness, symmetry and cues of good genes</article-title>. <source>Proc. R. Soc. Lond. Ser. B Biol. Sci.</source> <volume>266</volume>, <fpage>1913</fpage>&#x2013;<lpage>1917</lpage>. doi: <pub-id pub-id-type="doi">10.1098/rspb.1999.0866</pub-id>, PMID: <pub-id pub-id-type="pmid">10535106</pub-id></citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schmidt</surname> <given-names>K.</given-names></name> <name><surname>Levenstein</surname> <given-names>R.</given-names></name> <name><surname>Ambadar</surname> <given-names>Z.</given-names></name></person-group> (<year>2012</year>). <article-title>Intensity of smiling and attractiveness as facial signals of trustworthiness in women</article-title>. <source>Percept. Mot. Skills</source> <volume>114</volume>, <fpage>964</fpage>&#x2013;<lpage>978</lpage>. doi: <pub-id pub-id-type="doi">10.2466/07.09.21.pms.114.3.964-978</pub-id>, PMID: <pub-id pub-id-type="pmid">22913033</pub-id></citation></ref>
<ref id="ref45"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tanaka</surname> <given-names>J. W.</given-names></name> <name><surname>Kaiser</surname> <given-names>M. D.</given-names></name> <name><surname>Butler</surname> <given-names>S.</given-names></name> <name><surname>Le Grand</surname> <given-names>R.</given-names></name></person-group> (<year>2012</year>). <article-title>Mixed emotions: holistic and analytic perception of facial expressions</article-title>. <source>Cognit. Emot.</source> <volume>26</volume>, <fpage>961</fpage>&#x2013;<lpage>977</lpage>. doi: <pub-id pub-id-type="doi">10.1080/02699931.2011.630933</pub-id>, PMID: <pub-id pub-id-type="pmid">22273429</pub-id></citation></ref>
<ref id="ref46"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thornhill</surname> <given-names>R.</given-names></name> <name><surname>Gangestad</surname> <given-names>S. W.</given-names></name></person-group> (<year>1993</year>). <article-title>Human facial beauty</article-title>. <source>Hum. Nat.</source> <volume>4</volume>, <fpage>237</fpage>&#x2013;<lpage>269</lpage>. doi: <pub-id pub-id-type="doi">10.1007/bf02692201</pub-id></citation></ref>
<ref id="ref47"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thornhill</surname> <given-names>R.</given-names></name> <name><surname>Gangestad</surname> <given-names>S. W.</given-names></name></person-group> (<year>1999</year>). <article-title>Facial attractiveness</article-title>. <source>Trends Cogn. Sci.</source> <volume>3</volume>, <fpage>452</fpage>&#x2013;<lpage>460</lpage>. doi: <pub-id pub-id-type="doi">10.1016/s1364-6613(99)01403-5</pub-id></citation></ref>
<ref id="ref48"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tsantani</surname> <given-names>M.</given-names></name> <name><surname>Podgajecka</surname> <given-names>V.</given-names></name> <name><surname>Gray</surname> <given-names>K. L. H.</given-names></name> <name><surname>Cook</surname> <given-names>R.</given-names></name></person-group> (<year>2022</year>). <article-title>How does the presence of a surgical face mask impair the perceived intensity of facial emotions?</article-title> <source>PLoS One</source> <volume>17</volume>:<fpage>e0262344</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0262344</pub-id>, PMID: <pub-id pub-id-type="pmid">35025948</pub-id></citation></ref>
<ref id="ref49"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ueda</surname> <given-names>R.</given-names></name> <name><surname>Kuraguchi</surname> <given-names>K.</given-names></name> <name><surname>Ashida</surname> <given-names>H.</given-names></name></person-group> (<year>2016</year>). <article-title>Asymmetric effect of expression intensity on evaluations of facial attractiveness</article-title>. <source>SAGE Open</source> <volume>6</volume>:<fpage>215824401667756</fpage>. doi: <pub-id pub-id-type="doi">10.1177/2158244016677569</pub-id></citation></ref>
<ref id="ref50"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ulrich</surname> <given-names>L.</given-names></name> <name><surname>Dugelay</surname> <given-names>J.-L.</given-names></name> <name><surname>Vezzetti</surname> <given-names>E.</given-names></name> <name><surname>Moos</surname> <given-names>S.</given-names></name> <name><surname>Marcolin</surname> <given-names>F.</given-names></name></person-group> (<year>2019</year>). <article-title>Perspective morphometric criteria for facial beauty and proportion assessment</article-title>. <source>Appl. Sci.</source> <volume>10</volume>:<fpage>8</fpage>. doi: <pub-id pub-id-type="doi">10.3390/app10010008</pub-id></citation></ref>
<ref id="ref51"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wagner</surname> <given-names>H. L.</given-names></name></person-group> (<year>1990</year>). <article-title>The spontaneous facial expression of differential positive and negative emotions</article-title>. <source>Motiv. Emot.</source> <volume>14</volume>, <fpage>27</fpage>&#x2013;<lpage>43</lpage>. doi: <pub-id pub-id-type="doi">10.1007/bf00995547</pub-id></citation></ref>
<ref id="ref52"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xiao</surname> <given-names>W. S.</given-names></name> <name><surname>Fu</surname> <given-names>G.</given-names></name> <name><surname>Quinn</surname> <given-names>P. C.</given-names></name> <name><surname>Sun</surname> <given-names>Y.-H.</given-names></name> <name><surname>Xiao</surname> <given-names>N. G.</given-names></name> <name><surname>Wang</surname> <given-names>Q.</given-names></name> <etal/></person-group>. (<year>2014</year>). <article-title>The eye-size illusion: psychophysical characteristics, generality, and relation to holistic face processing</article-title>. <source>Perception</source> <volume>43</volume>, <fpage>265</fpage>&#x2013;<lpage>274</lpage>. doi: <pub-id pub-id-type="doi">10.1068/p7647</pub-id>, PMID: <pub-id pub-id-type="pmid">25109017</pub-id></citation></ref>
<ref id="ref53"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>&#x017B;urowska</surname> <given-names>N.</given-names></name> <name><surname>Ka&#x0142;wa</surname> <given-names>A.</given-names></name> <name><surname>Rymarczyk</surname> <given-names>K.</given-names></name> <name><surname>Habrat</surname> <given-names>B.</given-names></name></person-group> (<year>2018</year>). <article-title>Recognition of emotional facial expressions in benzodiazepine dependence and detoxification</article-title>. <source>Cogn. Neuropsychiatry</source> <volume>23</volume>, <fpage>74</fpage>&#x2013;<lpage>87</lpage>. doi: <pub-id pub-id-type="doi">10.1080/13546805.2018.1426448</pub-id></citation></ref>
</ref-list>
</back>
</article>
