<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JFR</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Form Res</journal-id>
      <journal-title>JMIR Formative Research</journal-title>
      <issn pub-type="epub">2561-326X</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v7i1e44632</article-id>
      <article-id pub-id-type="pmid">37166970</article-id>
      <article-id pub-id-type="doi">10.2196/44632</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Evaluating the Feasibility of Emotion Expressions in Avatars Created From Real Person Photos: Pilot Web-Based Survey of Virtual Reality Software</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Mavragani</surname>
            <given-names>Amaryllis</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Pérez-Vázquez</surname>
            <given-names>Elena</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Dechsling</surname>
            <given-names>Anders</given-names>
          </name>
          <degrees>MSci</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Department of Education, ICT and Learning</institution>
            <institution>Faculty of Teacher Education and Languages</institution>
            <institution>Østfold University College</institution>
            <addr-line>B R A veien 4</addr-line>
            <addr-line>Halden, NO-1757</addr-line>
            <country>Norway</country>
            <phone>47 69608000</phone>
            <email>anders.dechsling@hiof.no</email>
          </address>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4839-8703</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author">
          <name name-style="western">
            <surname>Cogo-Moreira</surname>
            <given-names>Hugo</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-9411-9237</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Gangestad</surname>
            <given-names>Jonathan Spydevold</given-names>
          </name>
          <degrees>BA</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0001-6379-6304</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Johannessen</surname>
            <given-names>Sandra Nettum</given-names>
          </name>
          <degrees>BA</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0006-0191-7437</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Nordahl-Hansen</surname>
            <given-names>Anders</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-6411-3122</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Education, ICT and Learning</institution>
        <institution>Faculty of Teacher Education and Languages</institution>
        <institution>Østfold University College</institution>
        <addr-line>Halden</addr-line>
        <country>Norway</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Department of Behavioral Sciences</institution>
        <institution>Oslo Metropolitan University</institution>
        <addr-line>Oslo</addr-line>
        <country>Norway</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Welfare, Management and Organisation</institution>
        <institution>Faculty of Health, Welfare and Organisation</institution>
        <institution>Østfold University College</institution>
        <addr-line>Halden</addr-line>
        <country>Norway</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Anders Dechsling <email>anders.dechsling@hiof.no</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2023</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>11</day>
        <month>5</month>
        <year>2023</year>
      </pub-date>
      <volume>7</volume>
      <elocation-id>e44632</elocation-id>
      <history>
        <date date-type="received">
          <day>27</day>
          <month>11</month>
          <year>2022</year>
        </date>
        <date date-type="rev-request">
          <day>2</day>
          <month>3</month>
          <year>2023</year>
        </date>
        <date date-type="rev-recd">
          <day>22</day>
          <month>3</month>
          <year>2023</year>
        </date>
        <date date-type="accepted">
          <day>11</day>
          <month>4</month>
          <year>2023</year>
        </date>
      </history>
      <copyright-statement>©Anders Dechsling, Hugo Cogo-Moreira, Jonathan Spydevold Gangestad, Sandra Nettum Johannessen, Anders Nordahl-Hansen. Originally published in JMIR Formative Research (https://formative.jmir.org), 11.05.2023.</copyright-statement>
      <copyright-year>2023</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Formative Research, is properly cited. The complete bibliographic information, a link to the original publication on https://formative.jmir.org, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://formative.jmir.org/2023/1/e44632" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>The availability and potential of virtual reality (VR) has led to an increase of its application. VR is suggested to be helpful in training elements of social competence but with an emphasis on interventions being tailored. Recognizing facial expressions is an important social skill and thus a target for training. Using VR in training these skills could have advantages over desktop alternatives. Children with autism, for instance, appear to prefer avatars over real images when assessing facial expressions. Available software provides the opportunity to transform profile pictures into avatars, thereby giving the possibility of tailoring according to an individual’s own environment. However, the emotions provided by such software should be validated before application.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>Our aim was to investigate whether available software is a quick, easy, and viable way of providing emotion expressions in avatars transformed from real images.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>A total of 401 participants from a general population completed a survey on the web containing 27 different images of avatars transformed, using a software, from real images. We calculated the reliability of each image and their level of difficulty using a structural equation modeling approach. We used Bayesian confirmatory factor analysis testing under a multidimensional first-order correlated factor structure where faces showing the same emotions represented a latent variable.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>Few emotions were correctly perceived and rated as higher than other emotions. The factor loadings indicating the discrimination of the image were around 0.7, which means 49% shared variance with the latent factor that the face is linked with. The standardized thresholds indicating the difficulty level of the images are mostly around average, and the highest correlation is between faces showing happiness and anger.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>Only using a software to transform profile pictures to avatars is not sufficient to provide valid emotion expressions. Adjustments are needed to increase faces’ discrimination (eg, increasing reliabilities). The faces showed average levels of difficulty, meaning that they are neither very difficult nor very easy to perceive, which fits a general population. Adjustments should be made for specific populations and when applying this technology in clinical practice.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>avatar</kwd>
        <kwd>emotion recognition</kwd>
        <kwd>emotion</kwd>
        <kwd>face</kwd>
        <kwd>facial expression</kwd>
        <kwd>facial</kwd>
        <kwd>images</kwd>
        <kwd>real images</kwd>
        <kwd>software</kwd>
        <kwd>virtual reality</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Perception and processing of facial expression and emotions through the use of images is a long-standing research field [<xref ref-type="bibr" rid="ref1">1</xref>] and the use of facial emotion expression has become more common. Various sets of facial expressions have been developed for research purposes, deploying different facial expressions for different ethnicities [<xref ref-type="bibr" rid="ref2">2</xref>]. The need for differing ethnicity samples of facial expressions follows the rationale that “within-group” processing of emotions is more readily available than “out-groups.” The use of facial expressions in web-based experimental research has also been on the rise, aided by databases such as the <italic>Umeå University Database of Facial Expressions</italic> [<xref ref-type="bibr" rid="ref2">2</xref>].</p>
      <p>Facial recognition and emotion training has for example been used in the treatment of anxiety and depression [<xref ref-type="bibr" rid="ref3">3</xref>]. The potential for developing readily available databases for use with other groups with various diagnoses should be explored. However, some groups, such as many of those on the autism spectrum, are known to struggle with recognizing emotions in others [<xref ref-type="bibr" rid="ref4">4</xref>]. For many people with autism, it would be beneficial to be able to recognize other people’s facial expression when maneuvering the society. This study aims to validate emotion expressions created by a software that uses real profile pictures that are transformed into avatars. One important reason to use such software is that children with autism seem to prefer avatars over real photos [<xref ref-type="bibr" rid="ref5">5</xref>]. Interventions for children with autism should be individually tailored and the software could be a feasible way to quickly create the necessary material such as avatars made from people in the individual’s own environment. However, to make valid conclusions about the effects of an intervention, there is a need to validate the actual emotions expressed in the avatars. This proof-of-concept pilot survey therefore aims to investigate the feasibility in a general population first. With the knowledge on whether the emotions are correct or incorrect, it is possible to decide on the next step. Either proceed with investigating the facial emotion expression assessment of specific populations such as those on the autism spectrum or adjust the technology or use of it before proceeding further.</p>
      <p>Autism spectrum disorders (autism from hereon) are characterized by challenges or differences in 2 main domains. The first domain relates to social interaction and communication. The American Psychiatric Association diagnostic manual indicates that the social communicational aspects are related to social-emotional reciprocity, for example differences in initiation and response in social interaction, nonverbal communicative behaviors, and developing and maintaining social relationships [<xref ref-type="bibr" rid="ref6">6</xref>]. The second domain highlighted in the Diagnostic and Statistical Manual of Mental Disorders, Fifth Edition, is stereotypic and repetitive behaviors [<xref ref-type="bibr" rid="ref6">6</xref>] and can be related to for instance fixed patterns of behavior, interests or routines, stereotyped motor movements, and hypo or hyperactivity related to sensory input. The prevalence of autism worldwide is estimated to be around 1% [<xref ref-type="bibr" rid="ref7">7</xref>] and many individuals with autism need special education or other support systems [<xref ref-type="bibr" rid="ref8">8</xref>]. It is important to acknowledge that there is a high heterogeneity between the individuals who fulfill the diagnostic criteria [<xref ref-type="bibr" rid="ref9">9</xref>]. This means that the help and support individuals with autism might need should be tailored toward each person individually.</p>
      <p>Young children diagnosed with autism tend to show more interest toward nonsocial stimuli than social stimuli [<xref ref-type="bibr" rid="ref10">10</xref>]. The interest toward nonsocial stimuli might lead to children with autism missing out on social learning during early years, and thus hinder them in fulfilling potential desires for social interaction with peers at later stages in life. Social skills are thus considered an important target for interventions within autism research and clinical practice because of the possible difficulties highlighted as key domains to receive an autism diagnosis [<xref ref-type="bibr" rid="ref11">11</xref>]. However, social interaction and communication are a highly complex domain consisting of a wide range of knowledge and skills of which the mastering criteria always depend on the context. One important area of social skills is to quickly recognize emotion expression in others and thereby behave and respond appropriately [<xref ref-type="bibr" rid="ref12">12</xref>]. Facial expressions are therefore a subject in many social skill interventions and taught in most of the group social skill interventions [<xref ref-type="bibr" rid="ref11">11</xref>]. Deficits in emotion recognition are associated with difficulties in social interaction [<xref ref-type="bibr" rid="ref13">13</xref>] and as a predictor of difficulties in adaptive socialization [<xref ref-type="bibr" rid="ref14">14</xref>]. The first step in responding to an emotion in another person’s facial expression is to identify the emotion expressed. Hence, there has been a focus to investigate [<xref ref-type="bibr" rid="ref15">15</xref>] and teach recognition of facial expressions and emotions to individuals with autism [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref16">16</xref>]. 
These skills could be trained using immersive technology, thus reaping the benefits of the interest individuals with autism show towards computer-based environments [<xref ref-type="bibr" rid="ref17">17</xref>].</p>
      <p>Several researchers suggest that virtual reality (VR) technology could show promise in enhancing social skills [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref19">19</xref>]. VR is a term describing technology that displays potential real-world–like digital environments using visual and auditory stimuli through head-mounted displays (HMD), projectors or desktop or tablet devices with a possibility of interacting with that environment [<xref ref-type="bibr" rid="ref20">20</xref>]. There are also different modalities of HMDs and projector setups. For instance, VR HMD consists of wearable goggles with inbuilt screens that give the user a feeling of being completely surrounded by the virtual environment and various versions of VR HMD provide various levels of digital interaction possibilities. Augmented reality (AR) is technology wherein digital components or images are superimposed on or blended with the real-world environment [<xref ref-type="bibr" rid="ref21">21</xref>], often viewed through a mobile phone or tablet screen, or AR HMD (most often referred to as AR glasses or smart glasses). VR projector setups could range from Kinect technology using a projector and a screen in combination with motion sensors, to a full cave automatic virtual environment that consists of projectors and screens surrounding the user [<xref ref-type="bibr" rid="ref22">22</xref>]. The potential uses of VR have led to an increase of its application in educational and special education settings [<xref ref-type="bibr" rid="ref23">23</xref>], and in particular the amount of research on the use of VR for individuals diagnosed with autism [<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref25">25</xref>].</p>
      <p>Importantly, VR has shown to be an acceptable tool for individuals with autism in general [<xref ref-type="bibr" rid="ref24">24</xref>] and for individuals with autism in need for more comprehensive support [<xref ref-type="bibr" rid="ref26">26</xref>]. More than 1 in 10 studies on autism and social skills in VR/AR target emotion recognition behaviors [<xref ref-type="bibr" rid="ref27">27</xref>]. Farashi et al [<xref ref-type="bibr" rid="ref28">28</xref>] identified, in their systematic review and meta-analysis, a positive influence from VR or computerized training in emotion recognition by individuals with autism. After including 23 studies that focused on autism and VR or computerized training programs for emotion recognition, they calculated an overall effect size that was relatively large (<italic>d</italic>=.69)—considering the autism context [<xref ref-type="bibr" rid="ref29">29</xref>]. However, the results obtained from Farashi et al [<xref ref-type="bibr" rid="ref28">28</xref>] should be evaluated with caution since it is a quite heterogenic sample of studies. Some studies have also used AR in the form of smart glasses in training emotion recognition [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>].</p>
      <p>Children with autism show a preference toward the digital avatars as opposed to a human assistant [<xref ref-type="bibr" rid="ref5">5</xref>]. We therefore consider social cues provided by avatars more in line with stakeholder preference in the early stages of skill training. This might be one of the main arguments for using avatars. Additionally, most of the studies included in Farashi et al [<xref ref-type="bibr" rid="ref28">28</xref>] used facial avatars and they suggest that this could have positive effects for individuals with autism. Pino et al [<xref ref-type="bibr" rid="ref32">32</xref>] concluded that children with autism experience less difficulties with recognizing emotions expressed by avatars as opposed to real images, and through eye tracking it was discovered that avatar faces were more explored than real faces. However, creating avatars with ecological valid expressions remains a possible challenge. Emotion expressions in general or specific populations do not necessarily differ per se. Therefore, the faces can be used interchangeably for the various population although there are numerous variables that can affect the recognition of emotions [<xref ref-type="bibr" rid="ref33">33</xref>] such as for example ethnicity [<xref ref-type="bibr" rid="ref2">2</xref>]. There are several “picture banks” developed, but many receive criticism related to the number of images or their representativeness [<xref ref-type="bibr" rid="ref2">2</xref>]. In many cases, there is a need for individually adjusted exercises and therefore facial expressions from the persons in the target individual’s actual network could be more helpful and useful than unknown persons. A software allowing photos of people to create emotion expressions in avatars could solve several issues related to, for instance, sufficient material or representativeness. Consequently, a possible pitfall might be the validity of the expressions, and how to create such faces. 
In Pino et al [<xref ref-type="bibr" rid="ref32">32</xref>], their expressions were validated through 2 psychologists and 20 typically developing children. In contrast, Tsai et al [<xref ref-type="bibr" rid="ref34">34</xref>] applied virtual technology in emotion recognition but only state that they validated the emotions beforehand, without stating how.</p>
      <p>In sum, emotion recognition is a frequent target in social skills interventions using immersive technology, especially for individuals with autism [<xref ref-type="bibr" rid="ref27">27</xref>]. Avatars could contribute to positive effects [<xref ref-type="bibr" rid="ref28">28</xref>] since they appear as the most preferred [<xref ref-type="bibr" rid="ref5">5</xref>] and explored, as well as perceived as less difficult to assess [<xref ref-type="bibr" rid="ref32">32</xref>]. When considering the claim and call for tailored interventions, and the research showing that children with autism might prefer avatars as opposed to real images, we here investigate whether an available software that can transform profile pictures into avatars is a quick, easy, and viable way of providing various emotion expressions in avatars created from the individual’s actual surroundings (caregivers, teachers, peers, etc). That is, we investigate features that easily allow for manipulating the emotions expressed by the avatars and test whether the program makes expressions that are perceived correctly, according to the program settings, by a general and unspecific population. As a starting point and for piloting reasons, we use an unspecific population (ie, general population sample), meaning that we do not exclude any specific population such as for example an autism population, since we believe that an unspecific population will be more representative for a general assessment of the emotion expressions. This evaluation could determine whether such software could be used at later stages when training the specific skill of emotion recognition for individuals in various specific populations including those with autism.</p>
      <p>Therefore, the overall research objective is to investigate whether an emotion expression software provides valid emotion expression when tested in a general population. More specifically, our research questions are as follows:</p>
      <list list-type="bullet">
        <list-item>
          <p>Research question 1: Is each emotion displayed by the avatars perceived correctly by the participants?</p>
        </list-item>
        <list-item>
          <p>Research question 2: What is the discrimination level between the images?</p>
        </list-item>
        <list-item>
          <p>Research question 3: What are the levels of difficulty in the images?</p>
        </list-item>
      </list>
    </sec>
    <sec sec-type="method">
      <title>Methods</title>
      <sec>
        <title>Participants</title>
        <p>All 401 participants who completed (data on noncompleters are not applicable) the survey were recruited through social media platforms with an open invitation and link to respond to the survey. The survey was open for 8 weeks. Participants were asked to report their gender, of whom 86.8% (348/401) reported being female, 13% (52/401) male, and 1 individual responded as “other.” The geographical origin of the participants was Scandinavia (382/401, 95.3%), rest of Europe (14/401, 3.5%), and spread around the world (5/401, 1.6%). The majority of participants were aged between 36 and 55 years (see <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref> for age distribution). The survey was piloted by 3 experts in the field and adjustments to the length were made prior to the publication of the survey.</p>
      </sec>
      <sec>
        <title>Ethical Consideration</title>
        <p>A formal ethical review from an ethical committee was not required for this study because no identifiable or health-related information was gathered from the participants. This has been reviewed and confirmed by the responsible faculty dean in line with the institutional guidelines. Confidentiality principles were safeguarded through the officially approved web-based survey tool <italic>Nettskjema</italic> that ensures proper data protection services (nettskjema@usit.uio.no). No identifying information (eg, IP address) was collected. All participants were provided with information on data protection and that by proceeding with the survey they made their voluntary informed consent, of which they could withdraw by exiting the survey, as recommended in the general guidelines of the Norwegian National Research Ethics Committees. No compensation was given for participation.</p>
      </sec>
      <sec>
        <title>Apparatus and Stimuli</title>
        <p>The survey was made using <italic>Nettskjema</italic>, a survey solution developed and hosted by the University of Oslo (nettskjema@usit.uio.no), which also ensures proper data protection services. This survey tool presents a fixed layout with possibilities of conducting various types of surveys.</p>
        <p>The pictures were made using the software Character Creator (developed and copyright by Reallusion Inc.). This software has an artificial intelligence (AI) function that enables the user to upload any photograph of a person’s face, thereby transforming it to a 3D model of the person. This feature also has the ability to adjust and transform the face to make it unrecognizable in the case of a need for privacy protection. The software has a number of pregenerated facial expressions with an additional “expressiveness scale” that can be used to adjust the faces. Furthermore, the software enables the user to adjust facial features such as eyebrows, nose, and all other features.</p>
        <p>A total of 36 pictures were designed, by VisuMedia, as a sample of avatars showing various emotions. The sample consisted of 4 different avatars with 9 different emotion expressions. The 9 emotions were the basic emotions Happy, Sad, Afraid, Angry, and the more “complex” Disgusted, Surprised, Interested, Bored, and Ashamed [<xref ref-type="bibr" rid="ref12">12</xref>].</p>
        <p>In developing the stimuli, real photos were uploaded to the headshot feature in the Character Creator 3 software. The photos were transformed to avatars using the Edit Facial feature. The preprogrammed and standard emotion settings were applied to these avatars with 100% on the expressiveness scale and exported as JPEG files.</p>
      </sec>
      <sec>
        <title>Survey</title>
        <p>The survey is a systematic replication of Samuelsson et al [<xref ref-type="bibr" rid="ref2">2</xref>], in terms of developing the questionnaire. The survey was accessed through a link that was distributed through social media platforms such as Facebook, open from April 28 to June 18, 2021. It was created using the service <italic>Nettskjema</italic>, which has some restrictions on layout and design that affected the presentation of the scales and photos in the survey.</p>
        <p>The participant first received the instruction:</p>
        <disp-quote>
          <p>In this survey you will be presented with a number of images of faces with different emotion expressions. With every image there will be presented alternatives to different emotions and a scale from 0 to 9, where 0 indicates that you completely disagree and 9 that you completely agree. Place your answer on the scale based on your opinion. Your responses are completely anonymous and cannot be traced to you or your IP address.</p>
        </disp-quote>
        <p>After pressing the consent button another page appeared with the demographic questions. When pressing “next,” a new page appeared with the text: “You are now ready to start the survey. Note that only one image will appear in a single page even though the image is repeated on that same page. Press Next page to start.”</p>
        <p>The participants were then presented with the text: “This person seems to be…,” the picture followed by all mentioned emotions. The participants were asked to rate each emotion to the same picture with a scale from 0 (disagree completely) to 9 (completely agree). See <xref rid="figure1" ref-type="fig">Figure 1</xref> for an illustration.</p>
        <p>The pictures were presented serialized in separate pages. The order of presenting the pictures in the survey was determined using “List Randomizer” from the randomization service [<xref ref-type="bibr" rid="ref35">35</xref>]. The last 9 pictures on the list were removed from the survey due to the length of the survey, meaning that 27 pictures were included in the survey (see <xref rid="figure2" ref-type="fig">Figure 2</xref>). All participants who completed the survey were presented with the pictures in the same order.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>The survey page.</p>
          </caption>
          <graphic xlink:href="formative_v7i1e44632_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>Images from the survey.</p>
          </caption>
          <graphic xlink:href="formative_v7i1e44632_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Statistical Analysis</title>
        <p>Based on Samuelsson et al [<xref ref-type="bibr" rid="ref2">2</xref>], we</p>
        <disp-quote>
          <p>… [considered] an image to be correctly classified if the highest score was given to the emotion corresponding to the true emotion. For example, if the emotion ‘sad’ was scored seven and the other emotions between 0 and 6 points, then sad would be counted as the response (p. 3).</p>
        </disp-quote>
        <p>After this procedure, we obtained 27 dichotomous variables used as observed outcomes in a structural equation modeling approach. This approach calculates (1) the reliability of each image and (2) the level of difficulty (ie, the threshold) for each one of the face images. We used Bayesian confirmatory factor analysis testing, a multidimensional first-order correlated factor structure where faces showing the same emotion represented a latent variable. Therefore, we created 8 latent variables underlying the 27 observed variables. The Bayes estimator was used as it is compatible with such a high number of dimensionalities.</p>
        <p>For all parameters (eg, factor loading and thresholds), we chose uninformative priors [<xref ref-type="bibr" rid="ref36">36</xref>], assigned by default in Mplus for dichotomous indicators. The priors are normally distributed with 0 mean and infinite variance.</p>
        <p>Potential scale reduction (PSR) was calculated to evaluate convergence. The Bayesian analysis used Markov chain Monte Carlo algorithms to iteratively obtain an approximation to the posterior distributions of the parameters. This approach was used to compare the variation of the parameters estimated in each iteration (called a chain). The PSR criterion essentially requires the between-chain variation to be smaller than the total of between- and within-chain variations. In terms of convergence, due to the complexity of the model, the minimum number of total iterations was 100,000, which included the discarded (burn-in) iterations.</p>
        <p>The model under testing in this study was run until the chains reached a PSR value of 1.0. We used posterior predictive <italic>P</italic> values (PPP) to test the structural model for misspecifications. If the model fits the data well, the PPP would be close to 0.5. The corresponding 95% CI for the difference between the observed and the replicated chi-square value would range from a negative value to the same positive value and be centered on 0 [<xref ref-type="bibr" rid="ref37">37</xref>,<xref ref-type="bibr" rid="ref38">38</xref>].</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Research Question 1</title>
        <p><xref ref-type="table" rid="table1">Table 1</xref> shows the percentage correctly perceived, meaning how often the emotion that models displayed was rated higher than all other emotions. The faces showing the highest percent of correctly perceived expressions were F4 (310/401, 77.3%) and F25 (296/401, 73.8%), and the lowest were F2 (12/401, 3.0%) and F8 (20/401, 5.0%).</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Percentage of correctly perceived emotions. </p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="470"/>
            <col width="0"/>
            <col width="300"/>
            <col width="0"/>
            <col width="200"/>
            <thead>
              <tr valign="top">
                <td colspan="3">Image ID and outcomes</td>
                <td colspan="2">Proportion</td>
                <td>Count</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="6">
                  <bold>F1</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.895</td>
                <td colspan="2">359</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.105</td>
                <td colspan="2">42</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F2</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.970</td>
                <td colspan="2">389</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.030</td>
                <td colspan="2">12</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F3</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.401</td>
                <td colspan="2">161</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.599</td>
                <td colspan="2">240</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F4</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.227</td>
                <td colspan="2">91</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.773</td>
                <td colspan="2">310</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F5</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.915</td>
                <td colspan="2">367</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.085</td>
                <td colspan="2">34</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F6</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.394</td>
                <td colspan="2">158</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.606</td>
                <td colspan="2">243</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F7</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.481</td>
                <td colspan="2">193</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.519</td>
                <td colspan="2">208</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F8</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.950</td>
                <td colspan="2">381</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.050</td>
                <td colspan="2">20</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F9</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.454</td>
                <td colspan="2">182</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.546</td>
                <td colspan="2">219</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F10</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.870</td>
                <td colspan="2">349</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.130</td>
                <td colspan="2">52</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F11</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.698</td>
                <td colspan="2">280</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.302</td>
                <td colspan="2">121</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F12</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.756</td>
                <td colspan="2">303</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.244</td>
                <td colspan="2">98</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F13</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.349</td>
                <td colspan="2">140</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.651</td>
                <td colspan="2">261</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F14</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.406</td>
                <td colspan="2">163</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.594</td>
                <td colspan="2">238</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F15</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.347</td>
                <td colspan="2">139</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.653</td>
                <td colspan="2">262</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F16</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.229</td>
                <td colspan="2">92</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.771</td>
                <td colspan="2">309</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F17</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.446</td>
                <td colspan="2">179</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.554</td>
                <td colspan="2">222</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F18</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.908</td>
                <td colspan="2">364</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.092</td>
                <td colspan="2">37</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F19</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.791</td>
                <td colspan="2">317</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.209</td>
                <td colspan="2">84</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F20</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.414</td>
                <td colspan="2">166</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.586</td>
                <td colspan="2">235</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F21</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.683</td>
                <td colspan="2">274</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.317</td>
                <td colspan="2">127</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F22</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.703</td>
                <td colspan="2">282</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.297</td>
                <td colspan="2">119</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F23</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.778</td>
                <td colspan="2">312</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.222</td>
                <td colspan="2">89</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F24</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.516</td>
                <td colspan="2">207</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.484</td>
                <td colspan="2">194</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F25</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.262</td>
                <td colspan="2">105</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.738</td>
                <td colspan="2">296</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F26</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.416</td>
                <td colspan="2">167</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.584</td>
                <td colspan="2">234</td>
              </tr>
              <tr valign="top">
                <td colspan="6">
                  <bold>F27</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Incorrect</td>
                <td colspan="2">0.559</td>
                <td colspan="2">224</td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>Correct</td>
                <td colspan="2">0.441</td>
                <td colspan="2">177</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
      </sec>
      <sec>
        <title>Research Question 2</title>
        <p>The 8-correlated factor model required 4700 iterations to meet the convergence criterion. A PPP of 0.182 was found for the model, and the 95% CI for the difference between the observed and replicated log-likelihoods ranged from –37.732 to 129.904, indicating an acceptable model. The factor loadings (<xref ref-type="table" rid="table2">Table 2</xref>) were all statistically significant (ie, the credibility interval does not cross 0); the highest factor loadings (ie, reliability) were observed for F13 (surprised; factor loading=0.733) and F8 (interested; factor loading=0.653). The lowest factor loadings were F21 (happy; factor loading=0.323) and F15 (angry; factor loading=0.336). Low reliability means that the expression cannot discriminate between participants who are and are not able to identify the expression under evaluation correctly. The majority of the faces showed a reliability above 0.4, which is a common cutoff for a meaningful factor loading effect size [<xref ref-type="bibr" rid="ref39">39</xref>]. Such a value indicates that the face shares 16% of its variance with the underlying factor.</p>
        <p>The highest correlation (<xref rid="figure3" ref-type="fig">Figure 3</xref>) was observed between happiness and anger (<italic>r</italic>=0.602), indicating that the better a participant perceived anger, the better he or she also perceived happiness. The lowest correlation was between sad and disgusted (<italic>r</italic>=–0.05), meaning that the recognition of these 2 expressions is essentially uncorrelated.</p>
        <table-wrap position="float" id="table2">
          <label>Table 2</label>
          <caption>
            <p>Standardized factor loadings, posterior SD, 95% credibility interval, and significance (yes/no).</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="30"/>
            <col width="200"/>
            <col width="0"/>
            <col width="190"/>
            <col width="0"/>
            <col width="170"/>
            <col width="0"/>
            <col width="240"/>
            <col width="0"/>
            <col width="170"/>
            <thead>
              <tr valign="top">
                <td colspan="3">Emotion and faces</td>
                <td colspan="2">Factor loading</td>
                <td colspan="2">Posterior SD</td>
                <td colspan="2">95% credibility interval</td>
                <td>Significance</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td colspan="10">
                  <bold>Interested</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F1</td>
                <td colspan="2">0.450</td>
                <td colspan="2">0.074</td>
                <td colspan="2">0.319-0.597</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F8</td>
                <td colspan="2">0.653<sup>a</sup></td>
                <td colspan="2">0.116</td>
                <td colspan="2">0.401-0.856</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F18</td>
                <td colspan="2">0.527</td>
                <td colspan="2">0.111</td>
                <td colspan="2">0.309-0.737</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F19</td>
                <td colspan="2">0.589</td>
                <td colspan="2">0.105</td>
                <td colspan="2">0.374-0.774</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Ashamed</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F2</td>
                <td colspan="2">0.452</td>
                <td colspan="2">0.106</td>
                <td colspan="2">0.326-0.726</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F5</td>
                <td colspan="2">0.525</td>
                <td colspan="2">0.176</td>
                <td colspan="2">0.116-0.840</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Happy</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F3</td>
                <td colspan="2">0.412</td>
                <td colspan="2">0.062</td>
                <td colspan="2">0.310-0.548</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F21</td>
                <td colspan="2">0.323</td>
                <td colspan="2">0.126</td>
                <td colspan="2">0.071-0.566</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Sad</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F4</td>
                <td colspan="2">0.492</td>
                <td colspan="2">0.069</td>
                <td colspan="2">0.363-0.656</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F6</td>
                <td colspan="2">0.559</td>
                <td colspan="2">0.122</td>
                <td colspan="2">0.309-0.796</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Disgusted</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F7</td>
                <td colspan="2">0.366</td>
                <td colspan="2">0.057</td>
                <td colspan="2">0.278-0.500</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F10</td>
                <td colspan="2">0.623<sup>a</sup></td>
                <td colspan="2">0.120</td>
                <td colspan="2">0.357-0.831</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F11</td>
                <td colspan="2">0.364</td>
                <td colspan="2">0.108</td>
                <td colspan="2">0.149-0.571</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F23</td>
                <td colspan="2">0.628<sup>a</sup></td>
                <td colspan="2">0.101</td>
                <td colspan="2">0.429-0.815</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Surprised</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F9</td>
                <td colspan="2">0.394</td>
                <td colspan="2">0.051</td>
                <td colspan="2">0.303-0.509</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F13</td>
                <td colspan="2">0.733<sup>a</sup></td>
                <td colspan="2">0.074</td>
                <td colspan="2">0.573-0.870</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F16</td>
                <td colspan="2">0.611</td>
                <td colspan="2">0.073</td>
                <td colspan="2">0.465-0.744</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F20</td>
                <td colspan="2">0.565</td>
                <td colspan="2">0.078</td>
                <td colspan="2">0.407-0.714</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Afraid</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F17</td>
                <td colspan="2">0.463</td>
                <td colspan="2">0.072</td>
                <td colspan="2">0.326-0.631</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F24</td>
                <td colspan="2">0.401</td>
                <td colspan="2">0.093</td>
                <td colspan="2">0.214-0.571</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F26</td>
                <td colspan="2">0.599</td>
                <td colspan="2">0.102</td>
                <td colspan="2">0.411-0.813</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Bored</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F12</td>
                <td colspan="2">0.531</td>
                <td colspan="2">0.066</td>
                <td colspan="2">0.396-0.657</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F25</td>
                <td colspan="2">0.516</td>
                <td colspan="2">0.093</td>
                <td colspan="2">0.325-0.680</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td colspan="10">
                  <bold>Angry</bold>
                </td>
              </tr>
              <tr valign="top">
                <td>
                  <break/>
                </td>
                <td>F22</td>
                <td colspan="2">0.466</td>
                <td colspan="2">0.064</td>
                <td colspan="2">0.364-0.617</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F14</td>
                <td colspan="2">0.468</td>
                <td colspan="2">0.097</td>
                <td colspan="2">0.281-0.666</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F15</td>
                <td colspan="2">0.336</td>
                <td colspan="2">0.094</td>
                <td colspan="2">0.147-0.517</td>
                <td colspan="2">Yes</td>
              </tr>
              <tr valign="top">
                <td> </td>
                <td>F27</td>
                <td colspan="2">0.628<sup>a</sup></td>
                <td colspan="2">0.090</td>
                <td colspan="2">0.442-0.808</td>
                <td colspan="2">Yes</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table2fn1">
              <p><sup>a</sup>The highest factor loadings.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
        <fig id="figure3" position="float">
          <label>Figure 3</label>
          <caption>
            <p>Intercorrelation between the 8-expression perception factor. Squares represent the 27 observed dichotomous indicators and the expression recognition factors ovals.</p>
          </caption>
          <graphic xlink:href="formative_v7i1e44632_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Research Question 3</title>
        <p><xref ref-type="table" rid="table3">Table 3</xref> shows standardized thresholds (difficulty parameter), posterior SD, 95% credibility interval, and significance. F2 (ashamed; threshold=1.881) might be seen as the most difficult face to be correctly rated, whereas the easiest was F16 (surprised; threshold=–0.730).</p>
        <table-wrap position="float" id="table3">
          <label>Table 3</label>
          <caption>
            <p>Standardized thresholds (difficulty parameter), posterior SD, 95% credibility interval, and significance (yes/no).</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="120"/>
            <col width="180"/>
            <col width="220"/>
            <col width="350"/>
            <col width="130"/>
            <thead>
              <tr valign="top">
                <td>Face</td>
                <td>Threshold</td>
                <td>Posterior SD</td>
                <td>95% credibility interval</td>
                <td>Significance</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>F1</td>
                <td>1.257</td>
                <td>0.085</td>
                <td>1.091 to 1.427</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F2</td>
                <td>1.881</td>
                <td>0.120</td>
                <td>1.661 to 2.133</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F3</td>
                <td>–0.249</td>
                <td>0.063</td>
                <td>–0.375 to –0.129</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F4</td>
                <td>–0.747</td>
                <td>0.069</td>
                <td>–0.879 to –0.609</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F5</td>
                <td>1.365</td>
                <td>0.090</td>
                <td>1.195 to 1.541</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F6</td>
                <td>–0.268</td>
                <td>0.063</td>
                <td>–0.390 to –0.144</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F7</td>
                <td>–0.047</td>
                <td>0.063</td>
                <td>–0.171 to 0.076</td>
                <td>No</td>
              </tr>
              <tr valign="top">
                <td>F8</td>
                <td>1.638</td>
                <td>0.102</td>
                <td>1.438 to 1.844</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F9</td>
                <td>–0.115</td>
                <td>0.063</td>
                <td>–0.237 to 0.012</td>
                <td>No</td>
              </tr>
              <tr valign="top">
                <td>F10</td>
                <td>1.121</td>
                <td>0.079</td>
                <td>0.965 to 1.278</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F11</td>
                <td>0.517</td>
                <td>0.066</td>
                <td>0.390 to 0.647</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F12</td>
                <td>0.692</td>
                <td>0.069</td>
                <td>0.555 to 0.830</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F13</td>
                <td>–0.378</td>
                <td>0.064</td>
                <td>–0.504 to –0.256</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F14</td>
                <td>–0.236</td>
                <td>0.063</td>
                <td>–0.358 to –0.112</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F15</td>
                <td>–0.390</td>
                <td>0.064</td>
                <td>–0.517 to –0.265</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F16</td>
                <td>–0.730</td>
                <td>0.069</td>
                <td>–0.864 to –0.596</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F17</td>
                <td>–0.131</td>
                <td>0.063</td>
                <td>–0.254 to –0.006</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F18</td>
                <td>1.323</td>
                <td>0.087</td>
                <td>1.153 to 1.497</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F19</td>
                <td>0.800</td>
                <td>0.069</td>
                <td>0.669 to 0.939</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F20</td>
                <td>–0.214</td>
                <td>0.063</td>
                <td>–0.338 to –0.090</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F21</td>
                <td>0.477</td>
                <td>0.066</td>
                <td>0.345 to 0.603</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F22</td>
                <td>0.535</td>
                <td>0.065</td>
                <td>0.406 to 0.661</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F23</td>
                <td>0.767</td>
                <td>0.068</td>
                <td>0.633 to 0.902</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F24</td>
                <td>0.043</td>
                <td>0.061</td>
                <td>–0.078 to 0.162</td>
                <td>No</td>
              </tr>
              <tr valign="top">
                <td>F25</td>
                <td>–0.636</td>
                <td>0.071</td>
                <td>–0.778 to –0.494</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F26</td>
                <td>–0.208</td>
                <td>0.065</td>
                <td>–0.336 to –0.079</td>
                <td>Yes</td>
              </tr>
              <tr valign="top">
                <td>F27</td>
                <td>0.150</td>
                <td>0.062</td>
                <td>0.026 to 0.269</td>
                <td>Yes</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Overview</title>
        <p>The aim of this study was to evaluate the possibilities of using the face generator as a valid tool to quickly display various emotions on avatars created from actual profile pictures as a starting point. This would allow clinicians and trainers to quickly transform pictures of specific people into avatars. We tested the generated images in an unspecific population as an approach toward reducing potential confounding factors associated with deficits in evaluating emotion expressions.</p>
        <p>In this proof of concept of using avatars directly transformed from profile pictures, we found, in contrast to Samuelsson et al [<xref ref-type="bibr" rid="ref2">2</xref>], few “true” emotions in our sample. This indicates inaccuracies in perceiving the intended emotion from our avatars. Hence, in terms of our first research question, we cannot confirm that participants assessed the pictures according to the intended emotion. In investigating the second research question, we found that the factor loadings (ie, discrimination) are not high; only a few faces showed factor loadings superior to 0.7, representing 49% of shared variance with the latent factor to which the face is linked. The factor loadings indicate that the images endorse the minimum cutoff of 0.4 commonly used in the literature for a meaningful effect size, but this effect size should be considered with caution. A cutoff of 0.4 for factor loading represents 16% of common shared variance, and consequently, 84% is measurement error. For clinical practice, we would suggest increasing the cutoff to closer to 0.7. In such a context, the images depicting higher factor loadings can be used for inspiration when trying to improve the discriminative features of other images. For example, F8 (interested) and F13 (surprised) showed the highest factor loadings even though they were considered more difficult (F8) and of average difficulty (F13) to perceive according to the threshold values.</p>
        <p>Threshold values closer to +3 would indicate that the emotion within the image was difficult to perceive, whereas values closer to –3 would indicate an easier face. Threshold values close to 0 and between –1 and +1 are considered average. When we evaluate the standardized threshold for each image (ie, the levels of difficulty) to answer our third research question, it appears that most thresholds are around average. This indicates that the tasks are neither too difficult nor too easy, as only 5 images could be considered difficult. This could be seen as good enough for the general population, as in our sample, which would benefit from having the whole spectrum of difficulty levels in the test. However, in specific populations that one could argue might find evaluating emotion expression a bit more challenging (eg, autism), the thresholds should be lower and the items thus easier. This suggests that there is a need for easier items than the ones represented in our images.</p>
        <p>Based on these results, we cannot confirm that plainly using software, exemplified with the Character Creator, is a valid approach on its own, given the lower effect size for the factor loading. Such usage might affect the validity evidence based on the response process of emotion recognition interventions that apply this approach. Furthermore, it is important to be aware of the possibility that interventions that have used this kind of software may not have actually trained emotion recognition skills, but merely tested the discrimination and difficulty of the presented facial expressions. That is, interventionists may commit a fallacy by attributing effects to skills acquisition when they are actually just testing the ecological validity of the faces.</p>
        <p>In our opinion, it is of great importance to individually evaluate people’s perceived emotions. The software used in our study seems highly feasible and easy to use, especially as a starting point in creating the avatars from profile pictures. Using the software settings alone does not seem like a quick fix, but the software allows the user to adjust and make detailed modifications to the faces, which makes it quite usable for specific settings. Furthermore, this feature might even enhance the possibility of morphing the stimuli and thus combine and use avatars and real faces interchangeably when suitable, as suggested by Pino et al [<xref ref-type="bibr" rid="ref32">32</xref>]. When using avatars in research and clinical practice with individuals with autism or others who need tailored interventions, it is still important to validate each emotion before applying it in training. Generalization of skills is still a major issue in autism interventions, and VR is proposed to remedy these issues; however, if the emotion expressed by avatars is not valid, this could have detrimental rather than positive effects on generalization.</p>
        <p>There are some limitations to our study. First, it is important to consider the fact that this survey was conducted with a sample from the general population, meaning it includes a broad spectrum of people, in a nonrandom selection process. Therefore, caution must be exercised when discussing the possible implications of the results for other specific contexts (eg, participants with autism). There are many variables that can affect the recognition of emotions [<xref ref-type="bibr" rid="ref2">2</xref>]. For example, the chosen profile pictures of the faces used in our study may have affected the results in some way. In addition, we used only 1 software program. The length of the survey may have led to fatigue that could affect the accuracy of the responses toward the end. There is a skewed distribution in terms of the gender of participants, as well as the cultural context. Therefore, our findings should be considered in this context. We have not controlled for any sequence effects, since all images were presented in the same order for every participant due to the restrictions of the layout. As already mentioned, the PPP indicates that the factor model is acceptable, but a fit closer to .5 would be preferable. Additionally, we suggest a higher cutoff score for the meaningful factor loading effect size as preferable in a practical setting.</p>
      </sec>
      <sec>
        <title>Conclusion</title>
        <p>Applying available software for using real images when creating avatars with various emotions is not as straightforward as it seems. The avatars did not display what is referred to as “true” emotions when assessed by our participants. Therefore, we cannot confirm that using such software alone provides valid emotion expressions. Through our survey, and the avatars created by the software, we found that individual adjustments might be needed to increase the discrimination, as well as the level of difficulty for various populations. We therefore suggest evaluating the emotions for each use specifically before applying them in interventions to ensure the respective validity of the findings (ie, avoiding the fallacy of actually again evaluating the photos instead of training emotion recognition).</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>Distribution of participants in different age groups.</p>
        <media xlink:href="formative_v7i1e44632_app1.png" xlink:title="PNG File , 9 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AI</term>
          <def>
            <p>artificial intelligence</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AR</term>
          <def>
            <p>augmented reality</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">HMD</term>
          <def>
            <p>head-mounted display</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">PPP</term>
          <def>
            <p>posterior predictive <italic>P</italic> value</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">PSR</term>
          <def>
            <p>proportional scale reduction</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">VR</term>
          <def>
            <p>virtual reality</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <notes>
      <sec>
        <title>Data Availability</title>
        <p>The data sets generated and analyzed during this study are available from the corresponding author on reasonable request.</p>
      </sec>
    </notes>
    <fn-group>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tottenham</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Tanaka</surname>
              <given-names>JW</given-names>
            </name>
            <name name-style="western">
              <surname>Leon</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>McCarry</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Nurse</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Hare</surname>
              <given-names>TA</given-names>
            </name>
            <name name-style="western">
              <surname>Marcus</surname>
              <given-names>DJ</given-names>
            </name>
            <name name-style="western">
              <surname>Westerlund</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Casey</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Nelson</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>The NimStim set of facial expressions: judgments from untrained research participants</article-title>
          <source>Psychiatry Res</source>
          <year>2009</year>
          <volume>168</volume>
          <issue>3</issue>
          <fpage>242</fpage>
          <lpage>249</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/19564050"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.psychres.2008.05.006</pub-id>
          <pub-id pub-id-type="medline">19564050</pub-id>
          <pub-id pub-id-type="pii">S0165-1781(08)00148-0</pub-id>
          <pub-id pub-id-type="pmcid">PMC3474329</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Samuelsson</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Jarnvik</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Henningsson</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Andersson</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Carlbring</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>The Umeå university database of facial expressions: a validation study</article-title>
          <source>J Med Internet Res</source>
          <year>2012</year>
          <volume>14</volume>
          <issue>5</issue>
          <fpage>e136</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2012/5/e136/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/jmir.2196</pub-id>
          <pub-id pub-id-type="medline">23047935</pub-id>
          <pub-id pub-id-type="pii">v14i5e136</pub-id>
          <pub-id pub-id-type="pmcid">PMC3510711</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hakamata</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Lissek</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bar-Haim</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Britton</surname>
              <given-names>JC</given-names>
            </name>
            <name name-style="western">
              <surname>Fox</surname>
              <given-names>NA</given-names>
            </name>
            <name name-style="western">
              <surname>Leibenluft</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Ernst</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Pine</surname>
              <given-names>DS</given-names>
            </name>
          </person-group>
          <article-title>Attention bias modification treatment: a meta-analysis toward the establishment of novel treatment for anxiety</article-title>
          <source>Biol Psychiatry</source>
          <year>2010</year>
          <volume>68</volume>
          <issue>11</issue>
          <fpage>982</fpage>
          <lpage>990</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/20887977"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.biopsych.2010.07.021</pub-id>
          <pub-id pub-id-type="medline">20887977</pub-id>
          <pub-id pub-id-type="pii">S0006-3223(10)00766-3</pub-id>
          <pub-id pub-id-type="pmcid">PMC3296778</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Golan</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Sinai-Gavrilov</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Baron-Cohen</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>The Cambridge mindreading face-voice battery for children (CAM-C): complex emotion recognition in children with and without autism spectrum conditions</article-title>
          <source>Mol Autism</source>
          <year>2015</year>
          <volume>6</volume>
          <fpage>22</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://molecularautism.biomedcentral.com/articles/10.1186/s13229-015-0018-z"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s13229-015-0018-z</pub-id>
          <pub-id pub-id-type="medline">25932320</pub-id>
          <pub-id pub-id-type="pii">18</pub-id>
          <pub-id pub-id-type="pmcid">PMC4415441</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kumazaki</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Warren</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Swanson</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Yoshikawa</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Matsumoto</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Yoshimura</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Shimaya</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ishiguro</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Sarkar</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Wade</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Mimura</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Minabe</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kikuchi</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Brief report: evaluating the utility of varied technological agents to elicit social attention from children with autism spectrum disorders</article-title>
          <source>J Autism Dev Disord</source>
          <year>2019</year>
          <volume>49</volume>
          <issue>4</issue>
          <fpage>1700</fpage>
          <lpage>1708</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30511126"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10803-018-3841-1</pub-id>
          <pub-id pub-id-type="medline">30511126</pub-id>
          <pub-id pub-id-type="pii">10.1007/s10803-018-3841-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC6450844</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <collab>American Psychiatric Association</collab>
          </person-group>
          <source>Diagnostic and Statistical Manual of Mental Disorders, 5th Edition (DSM-5)</source>
          <year>2013</year>
          <publisher-loc>Arlington, VA</publisher-loc>
          <publisher-name>American Psychiatric Publishing</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lord</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Brugha</surname>
              <given-names>TS</given-names>
            </name>
            <name name-style="western">
              <surname>Charman</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Cusack</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Dumas</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Frazier</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>EJH</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>RM</given-names>
            </name>
            <name name-style="western">
              <surname>Pickles</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>State</surname>
              <given-names>MW</given-names>
            </name>
            <name name-style="western">
              <surname>Taylor</surname>
              <given-names>JL</given-names>
            </name>
            <name name-style="western">
              <surname>Veenstra-VanderWeele</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Autism spectrum disorder</article-title>
          <source>Nat Rev Dis Primers</source>
          <year>2020</year>
          <volume>6</volume>
          <issue>1</issue>
          <fpage>5</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://hal.archives-ouvertes.fr/pasteur-02445082"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41572-019-0138-4</pub-id>
          <pub-id pub-id-type="medline">31949163</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41572-019-0138-4</pub-id>
          <pub-id pub-id-type="pmcid">PMC8900942</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zeidan</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Fombonne</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Scorah</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ibrahim</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Durkin</surname>
              <given-names>MS</given-names>
            </name>
            <name name-style="western">
              <surname>Saxena</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Yusuf</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Shih</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Elsabbagh</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Global prevalence of autism: a systematic review update</article-title>
          <source>Autism Res</source>
          <year>2022</year>
          <volume>15</volume>
          <issue>5</issue>
          <fpage>778</fpage>
          <lpage>790</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/35238171"/>
          </comment>
          <pub-id pub-id-type="doi">10.1002/aur.2696</pub-id>
          <pub-id pub-id-type="medline">35238171</pub-id>
          <pub-id pub-id-type="pmcid">PMC9310578</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Davis</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>den Houting</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Nordahl-Hansen</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Fletcher-Watson</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>PK</given-names>
            </name>
            <name name-style="western">
              <surname>Hart</surname>
              <given-names>CH</given-names>
            </name>
          </person-group>
          <article-title>Helping autistic children</article-title>
          <source>The Wiley-Blackwell Handbook of Childhood Social Development</source>
          <year>2022</year>
          <publisher-loc>Chichester</publisher-loc>
          <publisher-name>John Wiley and Sons</publisher-name>
          <fpage>729</fpage>
          <lpage>746</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gale</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Eikeseth</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Klintwall</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Children with autism show atypical preference for non-social stimuli</article-title>
          <source>Sci Rep</source>
          <year>2019</year>
          <volume>9</volume>
          <issue>1</issue>
          <fpage>10355</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41598-019-46705-8"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-019-46705-8</pub-id>
          <pub-id pub-id-type="medline">31316161</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-019-46705-8</pub-id>
          <pub-id pub-id-type="pmcid">PMC6637109</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wolstencroft</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Robinson</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Srinivasan</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Kerry</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Mandy</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Skuse</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>A systematic review of group social skills interventions, and meta-analysis of outcomes, for children with high functioning ASD</article-title>
          <source>J Autism Dev Disord</source>
          <year>2018</year>
          <volume>48</volume>
          <issue>7</issue>
          <fpage>2293</fpage>
          <lpage>2307</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/29423608"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10803-018-3485-1</pub-id>
          <pub-id pub-id-type="medline">29423608</pub-id>
          <pub-id pub-id-type="pii">10.1007/s10803-018-3485-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC5996019</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fridenson-Hayo</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Berggren</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Lassalle</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Tal</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Pigat</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Meir-Goren</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>O'Reilly</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Ben-Zur</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bölte</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Baron-Cohen</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Golan</surname>
              <given-names>O</given-names>
            </name>
          </person-group>
          <article-title>'Emotiplay': a serious game for learning about emotions in children with autism: results of a cross-cultural evaluation</article-title>
          <source>Eur Child Adolesc Psychiatry</source>
          <year>2017</year>
          <volume>26</volume>
          <issue>8</issue>
          <fpage>979</fpage>
          <lpage>992</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s00787-017-0968-0"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s00787-017-0968-0</pub-id>
          <pub-id pub-id-type="medline">28275895</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00787-017-0968-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Uljarevic</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Hamilton</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Recognition of emotions in autism: a formal meta-analysis</article-title>
          <source>J Autism Dev Disord</source>
          <year>2013</year>
          <volume>43</volume>
          <issue>7</issue>
          <fpage>1517</fpage>
          <lpage>1526</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10803-012-1695-5"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10803-012-1695-5</pub-id>
          <pub-id pub-id-type="medline">23114566</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hudepohl</surname>
              <given-names>MB</given-names>
            </name>
            <name name-style="western">
              <surname>Robins</surname>
              <given-names>DL</given-names>
            </name>
            <name name-style="western">
              <surname>King</surname>
              <given-names>TZ</given-names>
            </name>
            <name name-style="western">
              <surname>Henrich</surname>
              <given-names>CC</given-names>
            </name>
          </person-group>
          <article-title>The role of emotion perception in adaptive functioning of people with autism spectrum disorders</article-title>
          <source>Autism</source>
          <year>2015</year>
          <volume>19</volume>
          <issue>1</issue>
          <fpage>107</fpage>
          <lpage>112</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/10.1177/1362361313512725"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/1362361313512725</pub-id>
          <pub-id pub-id-type="medline">24335115</pub-id>
          <pub-id pub-id-type="pii">1362361313512725</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tsang</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>Eye-tracking study on facial emotion recognition tasks in individuals with high-functioning autism spectrum disorders</article-title>
          <source>Autism</source>
          <year>2018</year>
          <volume>22</volume>
          <issue>2</issue>
          <fpage>161</fpage>
          <lpage>170</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/10.1177/1362361316667830"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/1362361316667830</pub-id>
          <pub-id pub-id-type="medline">29490486</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hopkins</surname>
              <given-names>IM</given-names>
            </name>
            <name name-style="western">
              <surname>Gower</surname>
              <given-names>MW</given-names>
            </name>
            <name name-style="western">
              <surname>Perez</surname>
              <given-names>TA</given-names>
            </name>
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>DS</given-names>
            </name>
            <name name-style="western">
              <surname>Amthor</surname>
              <given-names>FR</given-names>
            </name>
            <name name-style="western">
              <surname>Wimsatt</surname>
              <given-names>FC</given-names>
            </name>
            <name name-style="western">
              <surname>Biasini</surname>
              <given-names>FJ</given-names>
            </name>
          </person-group>
          <article-title>Avatar assistant: improving social skills in students with an ASD through a computer-based intervention</article-title>
          <source>J Autism Dev Disord</source>
          <year>2011</year>
          <volume>41</volume>
          <issue>11</issue>
          <fpage>1543</fpage>
          <lpage>1555</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10803-011-1179-z"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10803-011-1179-z</pub-id>
          <pub-id pub-id-type="medline">21287255</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bölte</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Golan</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Goodwin</surname>
              <given-names>MS</given-names>
            </name>
            <name name-style="western">
              <surname>Zwaigenbaum</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>What can innovative technologies do for autism spectrum disorders?</article-title>
          <source>Autism</source>
          <year>2010</year>
          <volume>14</volume>
          <issue>3</issue>
          <fpage>155</fpage>
          <lpage>159</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/pdf/10.1177/1362361310365028"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/1362361310365028</pub-id>
          <pub-id pub-id-type="medline">20603897</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dechsling</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Orm</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kalandadze</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Sütterlin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Øien</surname>
              <given-names>RA</given-names>
            </name>
            <name name-style="western">
              <surname>Shic</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Nordahl-Hansen</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Virtual and augmented reality in social skills interventions for individuals with autism spectrum disorder: a scoping review</article-title>
          <source>J Autism Dev Disord</source>
          <year>2022</year>
          <volume>52</volume>
          <issue>11</issue>
          <fpage>4692</fpage>
          <lpage>4707</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/34783991"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10803-021-05338-5</pub-id>
          <pub-id pub-id-type="medline">34783991</pub-id>
          <pub-id pub-id-type="pii">10.1007/s10803-021-05338-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC9556391</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Howard</surname>
              <given-names>MC</given-names>
            </name>
            <name name-style="western">
              <surname>Gutworth</surname>
              <given-names>MB</given-names>
            </name>
          </person-group>
          <article-title>A meta-analysis of virtual reality training programs for social skill development</article-title>
          <source>Comput Educ</source>
          <year>2020</year>
          <volume>144</volume>
          <fpage>103707</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.sciencedirect.com/science/article/abs/pii/S036013151930260X"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.compedu.2019.103707</pub-id>
          <pub-id pub-id-type="pii">S036013151930260X</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cipresso</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Giglioli</surname>
              <given-names>IAC</given-names>
            </name>
            <name name-style="western">
              <surname>Raya</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Riva</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>The past, present, and future of virtual and augmented reality research: a network and cluster analysis of the literature</article-title>
          <source>Front Psychol</source>
          <year>2018</year>
          <volume>9</volume>
          <fpage>2086</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.frontiersin.org/articles/10.3389/fpsyg.2018.02086/full"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fpsyg.2018.02086</pub-id>
          <pub-id pub-id-type="medline">30459681</pub-id>
          <pub-id pub-id-type="pmcid">PMC6232426</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>SW</given-names>
            </name>
            <name name-style="western">
              <surname>Chang</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Current status, opportunities and challenges of augmented reality in education</article-title>
          <source>Comput Educ</source>
          <year>2013</year>
          <volume>62</volume>
          <fpage>41</fpage>
          <lpage>49</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.sciencedirect.com/science/article/abs/pii/S0360131512002527"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.compedu.2012.10.024</pub-id>
          <pub-id pub-id-type="pii">S0360131512002527</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ip</surname>
              <given-names>HH</given-names>
            </name>
            <name name-style="western">
              <surname>Wong</surname>
              <given-names>SW</given-names>
            </name>
            <name name-style="western">
              <surname>Chan</surname>
              <given-names>DF</given-names>
            </name>
            <name name-style="western">
              <surname>Byrne</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Yuan</surname>
              <given-names>VS</given-names>
            </name>
            <name name-style="western">
              <surname>Lau</surname>
              <given-names>KS</given-names>
            </name>
            <name name-style="western">
              <surname>Wong</surname>
              <given-names>JY</given-names>
            </name>
          </person-group>
          <article-title>Enhance emotional and social adaptation skills for children with autism spectrum disorder: a virtual reality enabled approach</article-title>
          <source>Comput Educ</source>
          <year>2018</year>
          <volume>117</volume>
          <fpage>1</fpage>
          <lpage>15</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.sciencedirect.com/science/article/abs/pii/S0360131517302142"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.compedu.2017.09.010</pub-id>
          <pub-id pub-id-type="pii">S0360131517302142</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bradley</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Newbutt</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Autism and virtual reality head-mounted displays: a state of the art systematic review</article-title>
          <source>J Enabling Technol</source>
          <year>2018</year>
          <volume>12</volume>
          <issue>3</issue>
          <fpage>101</fpage>
          <lpage>113</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.emerald.com/insight/content/doi/10.1108/JET-01-2018-0004/full/html"/>
          </comment>
          <pub-id pub-id-type="doi">10.1108/JET-01-2018-0004</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dechsling</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sütterlin</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Nordahl-Hansen</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Acceptability and normative considerations in research on autism spectrum disorders and virtual reality</article-title>
          <source>Augmented Cognition: Human Cognition and Behavior</source>
          <year>2020</year>
          <conf-name>14th International Conference, AC 2020, Held as Part of the 22nd HCI International Conference, HCII 2020</conf-name>
          <conf-date>July 19–24, 2020</conf-date>
          <conf-loc>Copenhagen, Denmark</conf-loc>
          <publisher-loc>Switzerland</publisher-loc>
          <publisher-name>Springer</publisher-name>
          <fpage>161</fpage>
          <lpage>170</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/chapter/10.1007/978-3-030-50439-7_11"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/978-3-030-50439-7_11</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lorenzo</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Newbutt</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Lorenzo-Lledó</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Global trends in the application of virtual reality for people with autism spectrum disorders: conceptual, intellectual and the social structure of scientific production</article-title>
          <source>J Comput Educ</source>
          <year>2021</year>
          <month>09</month>
          <day>08</day>
          <volume>9</volume>
          <issue>2</issue>
          <fpage>225</fpage>
          <lpage>260</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s40692-021-00202-y"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s40692-021-00202-y</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bauer</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Bouchara</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Duris</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Labossière</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Clément</surname>
              <given-names>M-N</given-names>
            </name>
            <name name-style="western">
              <surname>Bourdot</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Evaluating the acceptability and usability of a head-mounted augmented reality approach for autistic children with high support needs</article-title>
          <source>Virtual Reality and Mixed Reality</source>
          <year>2022</year>
          <conf-name>19th EuroXR International Conference, EuroXR 2022</conf-name>
          <conf-date>September 14–16, 2022</conf-date>
          <conf-loc>Stuttgart, Germany</conf-loc>
          <publisher-loc>Cham</publisher-loc>
          <publisher-name>Springer</publisher-name>
          <fpage>53</fpage>
          <lpage>72</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/chapter/10.1007/978-3-031-16234-3_4"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/978-3-031-16234-3_4</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mosher</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Carreon</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Craig</surname>
              <given-names>SL</given-names>
            </name>
            <name name-style="western">
              <surname>Ruhter</surname>
              <given-names>LC</given-names>
            </name>
          </person-group>
          <article-title>Immersive technology to teach social skills to students with autism spectrum disorder: a literature review</article-title>
          <source>Rev J Autism Dev Disord</source>
          <year>2021</year>
          <volume>9</volume>
          <issue>3</issue>
          <fpage>334</fpage>
          <lpage>350</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s40489-021-00259-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s40489-021-00259-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Farashi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Bashirian</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Jenabi</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Razjouyan</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Effectiveness of virtual reality and computerized training programs for enhancing emotion recognition in people with autism spectrum disorder: a systematic review and meta-analysis</article-title>
          <source>Int J Dev Disabil</source>
          <year>2022</year>
          <fpage>1</fpage>
          <lpage>17</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.tandfonline.com/doi/abs/10.1080/20473869.2022.2063656"/>
          </comment>
          <pub-id pub-id-type="doi">10.1080/20473869.2022.2063656</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kraft</surname>
              <given-names>MA</given-names>
            </name>
          </person-group>
          <article-title>Interpreting effect sizes of education interventions</article-title>
          <source>Educ Res</source>
          <year>2020</year>
          <month>04</month>
          <day>27</day>
          <volume>49</volume>
          <issue>4</issue>
          <fpage>241</fpage>
          <lpage>253</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/abs/10.3102/0013189X20912798"/>
          </comment>
          <pub-id pub-id-type="doi">10.3102/0013189X20912798</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Salisbury</surname>
              <given-names>JP</given-names>
            </name>
            <name name-style="western">
              <surname>Vahabzadeh</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sahin</surname>
              <given-names>NT</given-names>
            </name>
          </person-group>
          <article-title>Feasibility of an autism-focused augmented reality smartglasses system for social communication and behavioral coaching</article-title>
          <source>Front Pediatr</source>
          <year>2017</year>
          <volume>5</volume>
          <fpage>145</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/28695116"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fped.2017.00145</pub-id>
          <pub-id pub-id-type="medline">28695116</pub-id>
          <pub-id pub-id-type="pmcid">PMC5483849</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vahabzadeh</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Keshav</surname>
              <given-names>NU</given-names>
            </name>
            <name name-style="western">
              <surname>Abdus-Sabur</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Huey</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Sahin</surname>
              <given-names>NT</given-names>
            </name>
          </person-group>
          <article-title>Improved socio-emotional and behavioral functioning in students with autism following school-based smartglasses intervention: multi-stage feasibility and controlled efficacy study</article-title>
          <source>Behav Sci (Basel)</source>
          <year>2018</year>
          <volume>8</volume>
          <issue>10</issue>
          <fpage>85</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=bs8100085"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/bs8100085</pub-id>
          <pub-id pub-id-type="medline">30241313</pub-id>
          <pub-id pub-id-type="pii">bs8100085</pub-id>
          <pub-id pub-id-type="pmcid">PMC6209889</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pino</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Vagnetti</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Valenti</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Mazza</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Comparing virtual vs real faces expressing emotions in children with autism: an eye-tracking study</article-title>
          <source>Educ Inf Technol</source>
          <year>2021</year>
          <volume>26</volume>
          <issue>5</issue>
          <fpage>5717</fpage>
          <lpage>5732</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10639-021-10552-w"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10639-021-10552-w</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lambrecht</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Kreifelts</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Wildgruber</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Gender differences in emotion recognition: impact of sensory modality and emotional category</article-title>
          <source>Cogn Emot</source>
          <year>2014</year>
          <volume>28</volume>
          <issue>3</issue>
          <fpage>452</fpage>
          <lpage>469</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.tandfonline.com/doi/abs/10.1080/02699931.2013.837378"/>
          </comment>
          <pub-id pub-id-type="doi">10.1080/02699931.2013.837378</pub-id>
          <pub-id pub-id-type="medline">24151963</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tsai</surname>
              <given-names>W-T</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>I-J</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>C-H</given-names>
            </name>
          </person-group>
          <article-title>Inclusion of third-person perspective in CAVE-like immersive 3D virtual reality role-playing games for social reciprocity training of children with an autism spectrum disorder</article-title>
          <source>Univ Access Inf Soc</source>
          <year>2020</year>
          <month>06</month>
          <day>02</day>
          <volume>20</volume>
          <issue>2</issue>
          <fpage>375</fpage>
          <lpage>389</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/article/10.1007/s10209-020-00724-9"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s10209-020-00724-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="web">
          <source>RANDOM.ORG</source>
          <access-date>2023-04-21</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.random.org/">https://www.random.org/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Asparouhov</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Muthén</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <source>Bayesian analysis of latent variable models using Mplus</source>
          <year>2010</year>
          <access-date>2023-04-19</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.statmodel.com/download/BayesAdvantages18.pdf">https://www.statmodel.com/download/BayesAdvantages18.pdf</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gelman</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Two simple examples for understanding posterior p-values whose distributions are far from uniform</article-title>
          <source>Electron J Statist</source>
          <year>2013</year>
          <volume>7</volume>
          <fpage>2595</fpage>
          <lpage>2602</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://projecteuclid.org/journals/electronic-journal-of-statistics/volume-7/issue-none/Two-simple-examples-for-understanding-posterior-p-values-whose-distributions/10.1214/13-EJS854.full"/>
          </comment>
          <pub-id pub-id-type="doi">10.1214/13-EJS854</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Muthén</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <source>Bayesian analysis in Mplus: a brief introduction</source>
          <year>2010</year>
          <access-date>2023-04-19</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.statmodel.com/download/IntroBayesVersion%203.pdf">https://www.statmodel.com/download/IntroBayesVersion%203.pdf</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Nunnally</surname>
              <given-names>JC</given-names>
            </name>
          </person-group>
          <source>Psychometric Theory, 2nd ed</source>
          <year>1978</year>
          <publisher-loc>New York</publisher-loc>
          <publisher-name>McGraw-Hill</publisher-name>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
