<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Form Res</journal-id><journal-id journal-id-type="publisher-id">formative</journal-id><journal-id journal-id-type="index">27</journal-id><journal-title>JMIR Formative Research</journal-title><abbrev-journal-title>JMIR Form Res</abbrev-journal-title><issn pub-type="epub">2561-326X</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v9i1e71004</article-id><article-id pub-id-type="doi">10.2196/71004</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Avatar Customization and Embodiment in Virtual Reality Self-Compassion Therapy for Depressive Symptoms: Three-Part Mixed Methods Study</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes" equal-contrib="yes"><name name-style="western"><surname>Elliott</surname><given-names>Thomas C</given-names></name><degrees>BSc, MIDes</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Yang</surname><given-names>Yanzhuo</given-names></name><degrees>BEng, MIDes</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Knibbe</surname><given-names>Jarrod</given-names></name><degrees>MEng, PhD</degrees><xref ref-type="aff" 
rid="aff1">1</xref></contrib><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Henry</surname><given-names>Julie D</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Baghaei</surname><given-names>Nilufar</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib></contrib-group><aff id="aff1"><institution>School of Electrical Engineering and Computer Science, Faculty of Engineering, Architecture and Information Technology, The University of Queensland</institution><addr-line>General Purpose South (78), St Lucia QLD 4067 Level 4</addr-line><addr-line>Brisbane</addr-line><country>Australia</country></aff><aff id="aff2"><institution>School of Psychology, Faculty of Health and Behavioural Sciences, The University of Queensland</institution><addr-line>Brisbane</addr-line><country>Australia</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Mavragani</surname><given-names>Amaryllis</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Weidner</surname><given-names>Florian</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Zempo</surname><given-names>Keiichi</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Thomas C Elliott, BSc, MIDes, School of Electrical Engineering and Computer Science, Faculty of Engineering, Architecture and Information Technology, The University of Queensland, General Purpose South (78), St Lucia QLD 4067 Level 4, Brisbane, 4067, Australia, 61 0733652097; <email>t.elliott1@student.uq.edu.au</email></corresp><fn fn-type="equal" 
id="equal-contrib1"><label>*</label><p>these authors contributed equally</p></fn></author-notes><pub-date pub-type="collection"><year>2025</year></pub-date><pub-date pub-type="epub"><day>2</day><month>10</month><year>2025</year></pub-date><volume>9</volume><elocation-id>e71004</elocation-id><history><date date-type="received"><day>08</day><month>01</month><year>2025</year></date><date date-type="rev-recd"><day>19</day><month>08</month><year>2025</year></date><date date-type="accepted"><day>30</day><month>08</month><year>2025</year></date></history><copyright-statement>&#x00A9; Thomas C Elliott, Yanzhuo Yang, Jarrod Knibbe, Julie D Henry, Nilufar Baghaei. Originally published in JMIR Formative Research (<ext-link ext-link-type="uri" xlink:href="https://formative.jmir.org">https://formative.jmir.org</ext-link>), 2.10.2025. </copyright-statement><copyright-year>2025</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Formative Research, is properly cited. 
The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://formative.jmir.org">https://formative.jmir.org</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://formative.jmir.org/2025/1/e71004"/><abstract><sec><title>Background</title><p>As virtual reality technologies become more accessible, understanding how design features influence user experience (UX) and psychological benefit is critical, particularly for emotionally sensitive interventions. Thus, while prior studies support the use of self-compassion paradigms in immersive virtual reality (VR) environments, the effects of avatar stylization, customization, and mirrored self-representation on therapeutic outcomes are not well understood. For instance, while it is often assumed that increasingly realistic avatars are preferable to less realistic ones, this basic premise remains largely untested.</p></sec><sec><title>Objective</title><p>This study aimed to evaluate whether avatar appearance, customization features, and virtual mirrors affect UX and therapeutic outcomes in VR self-compassion therapy. Specifically, we examined whether stylized avatars, avatar customization, and virtual mirror feedback influenced user-rated self-compassion and depression symptoms.</p></sec><sec sec-type="methods"><title>Methods</title><p>Across three between-subjects studies (N=107 neurotypical adults), participants engaged in an immersive individualized VR therapy protocol based on a 2-phase compassion task. The conditions were (1) stylized avatars (n=20), (2) stylized customizable avatars (n=49), and (3) stylized customizable avatars with a virtual mirror (n=38). Participants completed the User Experience Questionnaire, the Self-Compassion Scale, and the 8-item Patient Health Questionnaire (PHQ-8). In study 3, presence was also assessed using the Slater-Usoh-Steed scale. 
Qualitative feedback was analyzed thematically. Between- and within-study comparisons used <italic>t</italic> tests and Mann-Whitney <italic>U</italic> tests.</p></sec><sec sec-type="results"><title>Results</title><p>Avatar customization (study 2) led to a significant increase in self-compassion (Self-Compassion Scale: baseline mean 3.05, SD 0.98; follow-up mean 3.55, SD 1.16; <italic>t</italic><sub>89</sub>=2.22; <italic>P</italic>=.03; <italic>d</italic>=&#x2013;0.47), though PHQ-8 scores remained unchanged. The virtual mirror condition (study 3) significantly improved depression scores (PHQ-8: <italic>U</italic>=477.5; <italic>z</italic>=2.53; <italic>P</italic>=.01; <italic>r</italic>=0.30) and UX across four User Experience Questionnaire categories, including attractiveness and dependability. However, self-compassion did not significantly change in study 3 (mean 3.88, SD 1.33 &#x2192; mean 4.09, SD 1.05; <italic>t</italic><sub>63</sub>=0.71; <italic>P</italic>=.47; <italic>d</italic>=0.18). Presence scores in study 3 (mean 4.56, SD 1.58) were also comparable to real-world benchmarks. Qualitative feedback highlighted strong engagement with avatars and mirrors, and participants reported emotional safety and personalization benefits.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>Stylized avatars, when paired with customization and mirrored embodiment, can support UX and therapeutic benefit in VR self-compassion therapy. These findings challenge the assumption that hyperrealistic avatars are superior and highlight the importance of emotionally congruent design choices. 
The combination of stylization, individualization, and visual feedback may offer a low-barrier, user-aligned strategy for future therapeutic VR applications.</p></sec></abstract><kwd-group><kwd>avatar design</kwd><kwd>virtual reality</kwd><kwd>self-compassion</kwd><kwd>depression</kwd><kwd>mental health</kwd><kwd>user experience</kwd><kwd>uncanny valley</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Virtual reality (VR) is emerging as a promising tool in therapeutic interventions for depression [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>]. One approach involves users embodying avatars to give and receive compassion, which has been shown to lead to positive self-compassion outcomes. Customization of avatars and environments may improve these effects [<xref ref-type="bibr" rid="ref3">3</xref>], yet the impact of such design decisions has not been systematically tested. Consequently, their influence on therapeutic outcomes remains unclear.</p><p>Much is already known about embodiment-related constructs relevant to VR therapy, including body ownership, agency, presence, and immersion [<xref ref-type="bibr" rid="ref4">4</xref>-<xref ref-type="bibr" rid="ref11">11</xref>]. These concepts provided important theoretical grounding in support of a similar VR therapy study [<xref ref-type="bibr" rid="ref12">12</xref>] and are therefore reviewed here in the context of the present study. Embodiment has been defined as the sense that the properties of a virtual body are experienced as one&#x2019;s own [<xref ref-type="bibr" rid="ref13">13</xref>]. Supporting conditions for agency, self-location, and ownership contribute to embodiment and can be facilitated by a first-person perspective and visuomotor synchrony.</p><p>Presence is critical to this study, enabling behavioral engagement in VR. 
It depends in part on embodiment&#x2014;specifically on recognizing a virtual body as one&#x2019;s own [<xref ref-type="bibr" rid="ref14">14</xref>]. Immersion, however, refers to the technical capacity of the system to produce realistic and interactive environments [<xref ref-type="bibr" rid="ref11">11</xref>]. To support immersion, this study used the Meta Quest 2 head-mounted display, which enables high immersion through multisensory stimulation [<xref ref-type="bibr" rid="ref15">15</xref>].</p><p>Beyond these foundational elements, this study is situated within the domain of individualized VR (iVR), where VR experiences are tailored to the user&#x2014;often through customization options. iVR has been shown to improve both user experience (UX) and therapeutic outcomes [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref3">3</xref>]. Customization of VR features such as adjusting game difficulty, audio, and objects from virtual environments has also been beneficial for the user [<xref ref-type="bibr" rid="ref16">16</xref>-<xref ref-type="bibr" rid="ref19">19</xref>]. Within iVR, avatar customization is one of many possibilities that allow users to modify the avatar&#x2019;s face, body, and clothing. Prior studies have shown that customization of an avatar has benefits for users with respect to identity, learning, inclusion, and engagement [<xref ref-type="bibr" rid="ref20">20</xref>-<xref ref-type="bibr" rid="ref23">23</xref>]. Because users often perceive avatars as extensions of themselves, this can enhance their sense of presence, embodiment, and attachment [<xref ref-type="bibr" rid="ref24">24</xref>]. 
Also, modifying avatar features to resemble the user improved embodiment, even in healthy populations [<xref ref-type="bibr" rid="ref25">25</xref>].</p><p>Prior studies show that strong embodiment can occur even with dissimilar or stylized avatars as long as key conditions like visuomotor synchrony and first-person perspective are met [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref26">26</xref>-<xref ref-type="bibr" rid="ref28">28</xref>]. Also, human familiarity with mirrors from early development supports the perception of mirrored avatars as self-representations [<xref ref-type="bibr" rid="ref29">29</xref>,<xref ref-type="bibr" rid="ref30">30</xref>]. Therefore, virtual mirrors can further enhance embodiment by allowing users to observe their avatars in motion, including facial expressions [<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref32">32</xref>]. Apart from UX benefits, virtual mirrors influence psychological and behavioral outcomes in VR. The &#x201C;Proteus effect&#x201D; suggests that avatars can alter user behavior and attitudes in virtual settings [<xref ref-type="bibr" rid="ref29">29</xref>,<xref ref-type="bibr" rid="ref33">33</xref>]. However, the virtual mirror potential in VR therapy is unclear and remains underexplored.</p><p>Self-compassion is the ability to &#x201C;soothe oneself with kindness and nonjudgmental understandings in times of difficulty&#x201D; [<xref ref-type="bibr" rid="ref34">34</xref>] and is the foundation of compassion-focused therapy (CFT) [<xref ref-type="bibr" rid="ref35">35</xref>]. In VR therapy, avatars replace human actors to deliver CFT [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. 
However, there are few established avatar design standards, which may disadvantage neurodivergent populations, such as those with depression, who may require accessible VR design considerations.</p><p>Although prior work has demonstrated the therapeutic value of avatar-based CFT in VR, it is unclear whether these benefits generalize to design factors such as avatar stylization, customization, and the use of virtual mirrors. Personalization has been linked to improved body ownership, agency, and immersion [<xref ref-type="bibr" rid="ref25">25</xref>], while virtual mirrors have shown potential therapeutic benefits in VR counseling [<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref32">32</xref>]. These findings motivate the current study, which investigates how avatar appearance and environmental design influence UX and self-compassion in VR therapy. Accordingly, we address two research questions:</p><list list-type="bullet"><list-item><p>RQ1: Do avatar and environmental design features influence UX?</p></list-item><list-item><p>RQ2: Do these features influence therapeutic benefit, measured as self-rated self-compassion?</p></list-item></list><p>The aim of this study is to examine whether avatar appearance, avatar customization, and the use of virtual mirrors influence UX and self-compassion in VR therapy. We conducted three studies to examine the effects of (1) stylized avatars, (2) customizable stylized avatars, and (3) customizable stylized avatars with a virtual mirror. Each study built upon prior work [<xref ref-type="bibr" rid="ref3">3</xref>], focusing only on the specified experimental features. A total of 107 neurotypical participants were recruited.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Overview</title><p>This manuscript adheres to the STROBE (Strengthening the Reporting of Observational Studies in Epidemiology) guidelines. 
This study replicates and extends a prior VR-based self-compassion therapy [<xref ref-type="bibr" rid="ref3">3</xref>], which serves as the control condition for study 1. The original iVR experience included an onboarding interface where participants selected avatars, environments, and companion avatars with emotional behaviors (crying or upset). The therapy consisted of two stages: delivering compassion and receiving compassion.</p><p>Three between-subject studies were conducted:</p><list list-type="bullet"><list-item><p>Study 1: Manipulated avatar appearance</p></list-item><list-item><p>Study 2: Introduced stylized avatar customization</p></list-item><list-item><p>Study 3: Introduced a virtual mirror</p></list-item></list><p>Each study built on the previous, allowing for pairwise comparisons. Stylized avatars and new features were integrated by editing the original Unity project files.</p></sec><sec id="s2-2"><title>Avatar Fidelity</title><p><xref ref-type="fig" rid="figure1">Figure 1</xref> compares the stylized avatar used in experimental conditions with the realistic avatar from the original system. Based on Weidner et al [<xref ref-type="bibr" rid="ref36">36</xref>], stylized avatars feature simplified textures and nonhuman proportions, while realistic avatars maintain human morphology and detail. The avatars of the control study (<xref ref-type="fig" rid="figure1">Figure 1</xref>, right) had such realistic features and therefore aligned with generic realistic avatars as defined in Weidner et al [<xref ref-type="bibr" rid="ref36">36</xref>].</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>Comparison of stylized and realistic avatars used in the experimental and control conditions of study 1. The stylized avatar (left) featured simplified textures and nonhuman proportions, while the realistic avatar (right) reflected detailed human morphology. 
Participants (n=20) completed a between-subject virtual reality self-compassion protocol targeting depressive symptoms at The University of Queensland extended reality laboratory (2023&#x2010;2024).</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig01.png"/></fig><p>For clarity, for the remainder of the paper, the term &#x201C;realistic&#x201D; refers to the generic realistic category [<xref ref-type="bibr" rid="ref36">36</xref>].</p></sec><sec id="s2-3"><title>Measures</title><p>Two key instruments were used:</p><list list-type="bullet"><list-item><p>User Experience Questionnaire (UEQ): Assesses UX across six scales.</p></list-item><list-item><p>Self-Compassion Scale (SCS) [<xref ref-type="bibr" rid="ref37">37</xref>]: Measures self-compassion</p></list-item></list><p>From study 2 onward, the 8-item Patient Health Questionnaire (PHQ-8) [<xref ref-type="bibr" rid="ref38">38</xref>], a validated measure for screening depression symptoms, was introduced to support downstream clinical effects of the iVR intervention. As this paper continues the work of Halim et al [<xref ref-type="bibr" rid="ref3">3</xref>], it was considered prudent to include ongoing depression measurement.</p><p>Within study 3, presence was measured using the Slater-Usoh-Steed (SUS) questionnaire [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref39">39</xref>], a 6-item scale to support the analysis of UX. With respect to measurement, presence was prioritized over embodiment measures due to practical constraints; also, as introduced earlier, presence performance implies embodiment quality [<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>].</p><p>A qualitative questionnaire gathered open-ended feedback about the avatars (studies 1 and 2) and a virtual mirror (study 3). 
These questions targeted participants&#x2019; likes and dislikes to probe for system improvements and aimed to elicit uncanny valley (UV) responses via an open-ended question in studies 1 and 2:</p><list list-type="bullet"><list-item><p xml:lang="en-gb">What were the top three things that you liked about individualized VR?</p></list-item><list-item><p xml:lang="en-gb">How do you think the next version can be improved?</p></list-item><list-item><p>How do you feel about your experience with the avatars? (Only in studies 1 and 2.)</p></list-item></list><p>In study 3, participants were given questions pertaining to their experience with the virtual mirror:</p><list list-type="bullet"><list-item><p>How do you feel about your experience with the mirror; what aspects of the mirror did you like or dislike? (Only in study 3.)</p></list-item></list><p>In the absence of eye tracking technology, these responses provided a means of validating mirror interactions. Thematic analysis was used to evaluate these responses [<xref ref-type="bibr" rid="ref40">40</xref>].</p></sec><sec id="s2-4"><title>Data Analysis</title><p>Two comparisons were made:</p><list list-type="bullet"><list-item><p>Within-study: pre-post changes in SCS, PHQ-8, and UEQ</p></list-item><list-item><p>Between-studies: differences across conditions at session completion</p></list-item></list><p>For each quantitative data set, the Shapiro-Wilk normality test was first applied. Depending on the normality of the data, either a Mann-Whitney <italic>U</italic> test or a 2-tailed <italic>t</italic> test was performed. 
The null hypothesis assumed no significant differences between conditions.</p></sec><sec id="s2-5"><title>Onboarding Procedure</title><sec id="s2-5-1"><title>iVR Therapy Stage 0: Individualization</title><p>From a lobby environment, the participants began by selecting an avatar that resembled themselves, selecting a therapeutic environment (<xref ref-type="fig" rid="figure2">Figure 2</xref>), a companion avatar, and the avatar&#x2019;s behavior. During customization (study 2), participants created avatars that resembled themselves. Movement in the VR scene was fixed to maintain task focus.</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Environment selection interface during the individualization stage of the individualized virtual reality self-compassion therapy. Participants (n=49) in study 2 selected a preferred virtual setting&#x2014;living room, Victorian house, or park&#x2014;as part of the avatar and scene customization process. The study targeted depressive symptoms and was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023&#x2010;2024).</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig02.png"/></fig></sec><sec id="s2-5-2"><title>iVR Therapy Stage 1: Giving Compassion</title><p>Participants interacted with their selected companion, who acted out their emotional state before the user. 
An instruction panel guided the participants through three compassion strategies [<xref ref-type="bibr" rid="ref1">1</xref>]:</p><list list-type="bullet"><list-item><p>Validation: Acknowledge and accept the companion&#x2019;s distress</p></list-item><list-item><p>Redirection of attention: Shift attention to something more positive</p></list-item><list-item><p>Memory activation: Encourage recalling a comforting memory (eg, someone who is kind to them)</p></list-item></list></sec><sec id="s2-5-3"><title>iVR Therapy Stage 2: Receiving Compassion</title><p>Participants reentered the same environment from the companion&#x2019;s perspective and observed their avatar delivering the recorded compassionate message from stage 1. This playback is intended to evoke a therapeutic experience and aligns with prior iVR and VR compassion interventions [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. A second session occurred for participants in studies 2 and 3; these second sessions occurred 2 weeks after initial exposure to the intervention.</p></sec></sec><sec id="s2-6"><title>Experimental Procedure</title><sec id="s2-6-1"><title>Study 1: Stylized Avatars</title><p>To explore RQ1 and RQ2, study 1 replaced all avatars with stylized avatars purchased from SunBox Games [<xref ref-type="bibr" rid="ref41">41</xref>]. Prior work suggests that highly realistic avatars do not necessarily enhance UX in immersive, stressful environments [<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref43">43</xref>]. 
The virtual environment in this study has a unique human-avatar interaction in stage 2, where participants receive compassion from a virtual self&#x2014;an essential component of self-compassion therapy.</p><p>According to Gisbergen et al [<xref ref-type="bibr" rid="ref42">42</xref>], stylized avatars can potentially avoid UV effects [<xref ref-type="bibr" rid="ref44">44</xref>], which may arise when avatars do not achieve full human realism. Therefore, by using lower-fidelity avatars, this study aimed to reduce user expectations for realism and improve predictability&#x2014;key aspects of system dependability as measured by the UEQ. Although dependability is not a direct measure of UV, it serves as a relevant proxy in the context of UV evaluation.</p><p>A qualitative method was included to capture UV-related responses, similar to Becker-Asano et al [<xref ref-type="bibr" rid="ref45">45</xref>]. UEQ and SCS were used for cross-study comparison. <xref ref-type="fig" rid="figure2">Figure 2</xref> shows the avatars for participant selection via the lobby interface.</p></sec><sec id="s2-6-2"><title>Study 2: Stylized Avatar Customization</title><p>Study 2 extended study 1 by adding customization options to stylized avatars (<xref ref-type="fig" rid="figure3">Figure 3</xref>). 
Participants adjusted the avatar using the following SunBox integrated tools [<xref ref-type="bibr" rid="ref41">41</xref>]:</p><list list-type="bullet"><list-item><p>Body: 8 parameters&#x2014;height, fat, muscle, skin, nails, eye, eyelashes, and brow</p></list-item><list-item><p>Features: hair and facial hair</p></list-item><list-item><p>Face: 22 facial features (eg, ears, nose, mouth, chin)</p></list-item><list-item><p>Clothing: 5 items&#x2014;glasses, hat, tops (shirts, jumpers, etc), bottoms (pants, skirts, etc), and shoes</p></list-item></list><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>Enhanced avatar customization interface used in study 2 of the virtual reality self-compassion therapy protocol. Participants (n=49) modified a stylized avatar by adjusting body shape, facial features, and clothing using predefined editable parameters. Customization was mandatory and guided by the instruction to create an avatar resembling their self-image. The study targeted depressive symptoms and was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023&#x2010;2024).</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig03.png"/></fig><p>Customization was required before proceeding. As a quality check, the facilitator encouraged participants to create avatars that resembled themselves within the limitations of the stylized appearance. Participants could not continue unless this criterion was satisfied by a visual check from the researcher supervising. While no formal metric was used, key features such as hairstyle, skin tone, and body shape were considered. All participants engaged fully in the customization process, and informal observation confirmed that stylized avatars generally resembled participants before continuing. 
Study 2 also added a second iVR session 2 weeks after the first, as this was required by the introduction of the PHQ-8.</p></sec><sec id="s2-6-3"><title>Study 3: Introduction of Virtual Mirror</title><p>Study 3 extended study 2 by introducing a virtual mirror positioned to the side of the user during stage 1 (<xref ref-type="fig" rid="figure4">Figure 4</xref>). This design was intended to enhance embodiment and presence, as theorized in the Background subsection, in an effort to elicit improvements in UX and therapeutic outcomes using the current customized stylized avatar format. Participants observed real-time reflections of their avatars without interfering with the primary task.</p><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>Virtual mirror setup introduced in study 3 of the individualized virtual reality self-compassion therapy protocol. Participants (n=38) viewed real-time reflections of their customized stylized avatars through a virtual mirror positioned to their side during the interaction task. The setup was designed to enhance embodiment and therapeutic presence. The study targeted depressive symptoms and was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023&#x2010;2024). The participant shown is not identifiable and provided consent for inclusion.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig04.png"/></fig></sec></sec><sec id="s2-7"><title>Participant and Study Setting</title><p>Participants were recruited from the general community via posters, social media, university mailing lists, and word of mouth. Most were students or staff at The University of Queensland. 
Eligibility required being &#x2265;18 years of age and capable of using immersive VR; those with severe motion sickness or conditions that might interfere with VR use were excluded.</p><p>Sessions were conducted one-on-one in a controlled room at The University of Queensland&#x2019;s extended reality laboratory (Brisbane, Australia), with a trained researcher present. Participants were recruited and tested between 2023 and 2024. Studies 2 and 3 included a 2-week in-person follow-up. Some individuals who registered did not attend their session and were not awarded course credit; no reasons were collected.</p><p>Sample sizes were determined pragmatically based on prior VR studies, available resources, and recruitment feasibility in a university setting. No formal power analysis was conducted due to the exploratory nature of the research.</p></sec><sec id="s2-8"><title>Ethical Considerations</title><p>This study was approved by The University of Queensland Human Research Ethics Committee (approval #2023/HE000468). All participants provided written informed consent prior to participation. Data were de-identified before analysis to protect participant privacy and confidentiality. Participants received course credit through the university&#x2019;s research participation system. No images or materials in the manuscript or supplementary files contain identifiable individuals.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Assessment Schedule</title><p>A flow diagram showing participant progression and the assessment schedule is presented in <xref ref-type="fig" rid="figure5">Figure 5</xref>.</p><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>Flow diagram showing participant progression and assessment schedule across three between-subjects studies conducted at The University of Queensland extended reality laboratory (2023&#x2010;2024). 
Participants (N=107) were enrolled in study 1 (n=20), study 2 (n=49), or study 3 (n=38), and 7 were later excluded from the analysis. The studies targeted depressive symptoms and included pre- and posttest measures of self-compassion (SCS), depression (PHQ-8), presence (SUS), and qualitative feedback. Studies 2 and 3 included a second session at least 2 weeks later. Interventions varied by avatar customization and the presence of a mirror. PHQ-8: 8-item Patient Health Questionnaire; SCS: Self-Compassion Scale; SUS: Slater-Usoh-Steed.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig05.png"/></fig></sec><sec id="s3-2"><title>Study 1: Stylized Avatars</title><p>A total of 20 participants completed session 1 (n=12 female participants, n=6 male participants, and n=2 undisclosed).</p><sec id="s3-2-1"><title>Impact on User Experience (UEQ)</title><p>At the time of analysis, the UEQ benchmark included 21,175 users from 468 studies assessing software and digital products [<xref ref-type="bibr" rid="ref46">46</xref>]. As shown in <xref ref-type="fig" rid="figure6">Figure 6</xref>, stylized avatars resulted in a category increase in perspicuity compared to the original study, as seen in <xref ref-type="fig" rid="figure7">Figure 7</xref>. All UEQ categories remained at least &#x201C;above average,&#x201D; indicating no negative impact on the UX. Thus, in relation to RQ1, stylized avatars appear appropriate.</p><fig position="float" id="figure6"><label>Figure 6.</label><caption><p>User Experience Questionnaire (UEQ) results from study 1, where participants (n=20; n=12 female, n=6 male, and n=2 undisclosed) interacted with stylized avatars during a single-session virtual reality self-compassion therapy targeting depressive symptoms. Participants completed the UEQ after session 1. 
Compared to the baseline results using realistic avatars (<xref ref-type="fig" rid="figure7">Figure 7</xref>), perspicuity scores improved while other dimensions remained at or above average. The study was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023).</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig06.png"/></fig><fig position="float" id="figure7"><label>Figure 7.</label><caption><p>User Experience Questionnaire (UEQ) results from a prior study [<xref ref-type="bibr" rid="ref3">3</xref>] using realistic avatars in a self-compassion virtual reality therapy protocol. This figure presents participant scores across six UEQ dimensions and serves as a baseline for evaluating the impact of avatar visual style in the current research. The original study targeted depressive symptoms and was conducted at The University of Queensland&#x2019;s extended reality laboratory (2022). Participants were neurotypical adults recruited from the university community.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig07.png"/></fig></sec><sec id="s3-2-2"><title>Impact on Self-Compassion (SCS)</title><p>The mean SCS score in study 1 was 3.11 (SD 1.14; Table S1 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Scores below 2.4 are low, between 2.4 and 3.6 are average, and above 3.6 are high [<xref ref-type="bibr" rid="ref47">47</xref>]. Therefore, after one VR therapy session, participants reported average levels of self-compassion.</p><p>A Shapiro-Wilk test for normality confirmed that data were normally distributed for study 1. 
A between-studies comparison with Halim et al [<xref ref-type="bibr" rid="ref3">3</xref>] using a 2-tailed <italic>t</italic> test found no significant difference in SCS between the control group (mean 3.07, SD 0.73) and study 1 (mean 3.11, SD 1.14; <italic>t</italic><sub>53</sub>=0.14; <italic>P</italic>=.89; Cohen <italic>d</italic>=0.04). These results indicate stylized avatars had no measurable impact on self-compassion (RQ2).</p></sec></sec><sec id="s3-3"><title>Study 2: Stylized Avatar Customization</title><p>A total of 49 participants completed session 1; 42 (86%) returned for session 2. The gender of the participants was as follows:</p><list list-type="bullet"><list-item><p>Session 1: 22 female, 22 male, 5 undisclosed</p></list-item><list-item><p>Session 2: 18 female, 21 male, 3 undisclosed</p></list-item></list><sec id="s3-3-1"><title>Impact on User Experience (UEQ)</title><p>Study 2 reused the same UEQ tool as study 1. As seen in <xref ref-type="fig" rid="figure8">Figure 8</xref>, scores declined in perspicuity, dependability, and novelty, with only dependability dropping below average. Given a between-studies comparison (Table S2 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>), UX differences were not statistically significant; therefore, with respect to RQ1, we did not find evidence that stylized avatar customization significantly influenced UX under the conditions tested.</p><fig position="float" id="figure8"><label>Figure 8.</label><caption><p>User Experience Questionnaire (UEQ) results from study 2 (n=49; session 1), in which participants interacted with personalized stylized avatars during a virtual reality self-compassion therapy targeting depressive symptoms. Participants completed the UEQ after their first session. Compared to study 1, mean scores decreased in perspicuity, dependability, and novelty, with only dependability rated below the neutral benchmark. 
The study was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023&#x2010;2024).</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig08.png"/></fig><fig position="float" id="figure9"><label>Figure 9.</label><caption><p>User Experience Questionnaire (UEQ) results from study 3 (n=38; session 1), where participants engaged in virtual reality self-compassion therapy using stylized avatars and a virtual mirror. Compared to study 2, scores significantly improved in attractiveness, perspicuity, dependability, and stimulation, with three dimensions rated as &#x201C;excellent.&#x201D; Assessments were completed after the first session. The study was conducted at The University of Queensland&#x2019;s extended reality laboratory (2023&#x2010;2024) and targeted depressive symptoms.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e71004_fig09.png"/></fig></sec><sec id="s3-3-2"><title>Impact on Self-Compassion (SCS)</title><p>For a within-study comparison, the Shapiro-Wilk test for normality confirmed that data were normally distributed. As seen in <xref ref-type="table" rid="table1">Table 1</xref>, a 2-tailed <italic>t</italic> test comparing baseline and session 2 SCS in study 2 revealed a statistically significant difference with a moderate effect size. 
These results provide evidence that customization positively influenced the therapeutic outcome (RQ2).</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Summary statistics and internal comparisons from study 2 of a three-part mixed methods study investigating virtual reality self-compassion therapy for depressive symptoms.<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup></p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Measures</td><td align="left" valign="bottom">Mean (SD) or mean rank</td><td align="left" valign="bottom">Mann-Whitney <italic>U</italic> test</td><td align="left" valign="bottom"><italic>t</italic> test (df) or <italic>z</italic></td><td align="left" valign="bottom"><italic>P</italic> value (2-tailed)</td><td align="left" valign="bottom">Effect size (Cohen <italic>d</italic> or <italic>r</italic>)</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="2">UEQ<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup></td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table1fn3">c</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Attractiveness</td><td align="left" valign="top">1.31 (0.85)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Perspicuity</td><td align="left" valign="top">1.65 (0.96)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content 
content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Efficiency</td><td align="left" valign="top">1.19 (0.79)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Dependability</td><td align="left" valign="top">1.07 (0.81)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Stimulation</td><td align="left" valign="top">1.18 (0.96)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Novelty</td><td align="left" valign="top">0.85 (1.01)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top" colspan="6">SCS<sup><xref ref-type="table-fn" rid="table1fn4">d</xref></sup></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Baseline</td><td align="left" valign="top">3.05 (0.98)</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Session 2</td><td align="left" valign="top">3.55 (1.16)</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" 
valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Comparison</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">2.219 (89)<sup><xref ref-type="table-fn" rid="table1fn5">e</xref></sup></td><td align="left" valign="top">.03</td><td align="left" valign="top">&#x2212;0.47</td></tr><tr><td align="left" valign="top" colspan="6">PHQ-8<sup><xref ref-type="table-fn" rid="table1fn6">f</xref></sup></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Baseline</td><td align="left" valign="top">46.16<sup><xref ref-type="table-fn" rid="table1fn7">g</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Session 2</td><td align="left" valign="top">44.71<sup><xref ref-type="table-fn" rid="table1fn7">g</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Comparison</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">972</td><td align="left" valign="top">0.26<sup><xref ref-type="table-fn" rid="table1fn8">h</xref></sup></td><td align="left" valign="top">.79</td><td align="left" valign="top">0.02<sup><xref ref-type="table-fn" rid="table1fn9">i</xref></sup></td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>Participants (n=49 at session 1; n=42 at session 2) interacted with personalized 
stylized avatars. Measures included the UEQ, SCS, and PHQ-8. The study was conducted at The University of Queensland extended reality laboratory between 2023 and 2024 using a between-subjects design.</p></fn><fn id="table1fn2"><p><sup>b</sup>UEQ: User Experience Questionnaire.</p></fn><fn id="table1fn3"><p><sup>c</sup>Not applicable.</p></fn><fn id="table1fn4"><p><sup>d</sup>SCS: Self-Compassion Scale.</p></fn><fn id="table1fn5"><p><sup>e</sup><italic>t</italic> test value.</p></fn><fn id="table1fn6"><p><sup>f</sup>PHQ-8: 8-item Patient Health Questionnaire.</p></fn><fn id="table1fn7"><p><sup>g</sup>Mean rank value.</p></fn><fn id="table1fn8"><p><sup>h</sup><italic>z</italic> value.</p></fn><fn id="table1fn9"><p><sup>i</sup><italic>r</italic> value.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-3-3"><title>Impact on Depression (PHQ-8)</title><p>The Shapiro-Wilk test for normality revealed nonnormal distribution; thus, a Mann-Whitney <italic>U</italic> test was used. No significant difference was found between baseline and session 2 PHQ-8 scores, with a negligible effect size (see <xref ref-type="table" rid="table1">Table 1</xref>).</p></sec></sec><sec id="s3-4"><title>Study 3: Introduction of Virtual Mirror</title><p>A total of 38 participants completed session 1; 35 (92%) returned for session 2. The gender of the participants was as follows:</p><list list-type="bullet"><list-item><p>Session 1: 20 female, 18 male</p></list-item><list-item><p>Session 2: 20 female, 15 male</p></list-item></list><sec id="s3-4-1"><title>Impact on User Experience (UEQ)</title><p>No changes were made to the UEQ tool since study 2. As seen in <xref ref-type="fig" rid="figure8">Figures 8</xref> and <xref ref-type="fig" rid="figure9">9</xref>, study 3 scored higher in attractiveness, perspicuity, dependability, and stimulation compared to study 2. 
Three of six dimensions were rated &#x201C;Excellent.&#x201D; Given a between-studies comparison (Table S2 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>), UX differences were statistically significant; therefore, we found evidence that stylized avatar customization and virtual mirrors significantly influenced UX. These findings support RQ1.</p></sec><sec id="s3-4-2"><title>Impact on Self-Compassion (SCS)</title><p>After removing five participants who did not complete the full SCS questionnaire, the Shapiro-Wilk test confirmed that data were normally distributed. A within-study, 2-tailed <italic>t</italic> test revealed no significant difference between baseline and session 2 SCS, with a small effect size (see <xref ref-type="table" rid="table2">Table 2</xref>). These results do not provide evidence that mirrors influenced self-compassion outcomes (RQ2).</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Summary statistics and internal comparisons from study 3 of a three-part mixed methods investigation into virtual reality self-compassion therapy for depressive symptoms.<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup></p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Measures</td><td align="left" valign="bottom">Mean (SD) or mean rank</td><td align="left" valign="bottom">Mann-Whitney <italic>U</italic> test</td><td align="left" valign="bottom"><italic>t</italic> test or <italic>z</italic></td><td align="left" valign="bottom"><italic>P</italic> value (2-tailed)</td><td align="left" valign="bottom">Effect size (Cohen <italic>d</italic> or <italic>r</italic>)</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="2">UEQ<sup><xref ref-type="table-fn" rid="table2fn2">b</xref></sup></td><td align="left" valign="top">&#x2014;<sup><xref 
ref-type="table-fn" rid="table2fn3">c</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Attractiveness</td><td align="left" valign="top">1.88 (0.92)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Perspicuity</td><td align="left" valign="top">2.22 (0.65)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Efficiency</td><td align="left" valign="top">1.42 (1.04)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Dependability</td><td align="left" valign="top">1.76 (0.75)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Stimulation</td><td align="left" valign="top">1.65 (1.02)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Novelty</td><td align="left" valign="top">0.87 (1.04)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top" colspan="6">SCS<sup><xref ref-type="table-fn" rid="table2fn4">d</xref></sup></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Baseline</td><td align="left" valign="top">3.88 (1.33)</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Session 2</td><td align="left" valign="top">4.09 (1.05)</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Comparison</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">0.71 (63)<sup><xref ref-type="table-fn" rid="table2fn5">e</xref></sup></td><td align="left" valign="top">.47</td><td align="left" valign="top">0.18</td></tr><tr><td align="left" valign="top" colspan="6">PHQ-8<sup><xref ref-type="table-fn" rid="table2fn6">f</xref></sup></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Baseline</td><td align="left" valign="top">44.93<sup><xref ref-type="table-fn" rid="table2fn7">g</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Session 2</td><td align="left" valign="top">32.07<sup><xref 
ref-type="table-fn" rid="table2fn7">g</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Comparison</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">477.5</td><td align="left" valign="top">2.53<sup><xref ref-type="table-fn" rid="table2fn8">h</xref></sup></td><td align="left" valign="top">.01</td><td align="left" valign="top">0.30<sup><xref ref-type="table-fn" rid="table2fn9">i</xref></sup></td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>Participants (n=38 at session 1; n=35 at session 2) interacted with personalized stylized avatars and received visual feedback via a virtual mirror. Measures included the UEQ, SCS, and PHQ-8. The study was conducted at The University of Queensland extended reality laboratory between 2023 and 2024 using a between-subjects design.</p></fn><fn id="table2fn2"><p><sup>b</sup>UEQ: User Experience Questionnaire.</p></fn><fn id="table2fn3"><p><sup>c</sup>Not applicable.</p></fn><fn id="table2fn4"><p><sup>d</sup>SCS: Self-Compassion Scale.</p></fn><fn id="table2fn5"><p><sup>e</sup><italic>t</italic> test value.</p></fn><fn id="table2fn6"><p><sup>f</sup>PHQ-8: 8-item Patient Health Questionnaire.</p></fn><fn id="table2fn7"><p><sup>g</sup>Mean rank value.</p></fn><fn id="table2fn8"><p><sup>h</sup><italic>z</italic> value.</p></fn><fn id="table2fn9"><p><sup>i</sup><italic>r</italic> value.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-4-3"><title>Impact on Depression (PHQ-8)</title><p>The data were not normally distributed; therefore, a Mann-Whitney <italic>U</italic> test found a significant reduction in PHQ-8 scores from baseline to session 2 (see <xref ref-type="table" rid="table2">Table 2</xref>). 
This result suggests a potential therapeutic benefit associated with the inclusion of stylized avatar customization and virtual mirrors.</p></sec><sec id="s3-4-4"><title>Impact on Presence (SUS)</title><p>The mean SUS score in study 3 (mean 4.56, SD 1.58, 95% CI 4.02-5.10) overlapped with the benchmark SUS scores in Usoh et al [<xref ref-type="bibr" rid="ref39">39</xref>], which reported a virtual (mean 3.8) and a real (mean 4.4) environment. This suggests that presence in our virtual mirror condition fell within a comparable range previously validated by Usoh et al [<xref ref-type="bibr" rid="ref39">39</xref>] and did not significantly differ from real-world experience (<italic>t</italic><sub>46</sub>=0.30; <italic>P</italic>=.77; <italic>d</italic>=0.18). The lower bound of our CI exceeded the virtual benchmark, suggesting a relatively strong presence in the mirror condition.</p></sec><sec id="s3-5"><title>Cross-Study Qualitative Analysis</title><p>Open-ended feedback was collected from 100 participants across all three studies; participants completed their assigned iVR therapy exposures prior to providing responses. Deductive and inductive thematic analysis was conducted on participants&#x2019; open-ended responses. Participants were labeled P1-P20 (study 1), P21-P62 (study 2), and P63-P100 (study 3). The deductive thematic approach (Table S3 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>) includes codes corresponding to UEQ dimensions from Schrepp et al [<xref ref-type="bibr" rid="ref46">46</xref>], which provided a structured lens through which to interpret qualitative feedback.</p><p>In parallel, an inductive analysis was performed to identify emergent themes not captured by the UEQ framework. These data-driven themes reflected patterns in participant experience that appeared to be consistent with experimental changes. 
The results of the inductive analysis are summarized in Table S4 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>.</p></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Overview</title><p>This study explored how stylized avatars, avatar customization, and virtual mirrors influenced UX (RQ1) and self-compassion outcomes (RQ2) across three iVR therapy conditions. In study 1, qualitative feedback indicated general acceptance of the stylized avatars. Study 2 showed a significant improvement in self-compassion outcomes compared to study 1. Study 3 produced significant improvements in four of the six UEQ dimensions, which suggests improved UX.</p></sec><sec id="s4-2"><title>Summary of User Experience (RQ1)</title><p>For cross-study comparisons, see Table S2 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>. Study 3 yielded the highest attractiveness scores, with a statistically significant improvement compared to study 2 (<italic>P</italic>=.01). Qualitative data indicated this may be attributed to the enjoyment of avatar customization, which was introduced in study 2 and praised by participants in study 3. However, immersion-related feedback also emerged, such as &#x201C;some of the environment designs feel too rigid&#x2026;which breaks the immersion&#x201D; (P78). Since Study 3 introduced virtual mirrors&#x2014;known to support embodiment&#x2014;this addition may explain the rise in UX.</p><p>On the perspicuity scale, study 3 again scored highest and significantly outperformed study 2 (<italic>P</italic>=.01). Participants highlighted the ease of learning, but across all studies, a lack of narrative context was noted. 
For example, participants reported difficulty understanding the avatars&#x2019; emotional states, affecting their ability to engage in role-play scenarios.</p><p>Study 3 also significantly improved dependability scores (<italic>P</italic>&#x003C;.001). The stylized avatars, rather than triggering discomfort, were perceived as appealing&#x2014;&#x201C;they&#x2019;re all cute&#x201D; (P37)&#x2014;and met participant expectations for a supportive environment. Thematic analysis suggested that the participants felt the system supported therapeutic outcomes: &#x201C;I think this can make me feel relaxed&#x201D; (P83).</p><p>Stimulation was significantly higher in study 3 than in study 2 (<italic>P</italic>=.04). Participants reported increased interest when viewing themselves delivering compassion, suggesting that the mirror feature enhanced engagement.</p><p>With respect to novelty, all studies received positive feedback&#x2014;&#x201C;innovative and unique&#x201D; (P20)&#x2014;but only study 1 reached an &#x201C;above average&#x201D; benchmark. Some participants reported reduced novelty due to restricted avatar movement, for example, &#x201C;better if the users got a little more freedom during the simulation&#x201D; (P56). This may indicate a design trade-off between structured therapeutic focus and exploratory freedom.</p></sec><sec id="s4-3"><title>Interpreting User Experience</title><p>The UEQ served as the primary UX tool due to its alignment with iVR design goals for mental health. Though no significant differences were observed between studies 1 and 2, stylized avatars performed well compared to the UEQ benchmark, validating their continued exploration in therapeutic contexts. However, despite enhanced customization in study 2, UEQ scores did not improve significantly. One possible explanation may lie in elevated participant expectations (eg, limited skin tone options), which may have led to reduced satisfaction. 
This supports the idea that &#x201C;less is more&#x201D;; a limited but well-executed customization system may better serve therapeutic VR experiences.</p><p>Feedback across studies emphasized the importance of avatar individualization for user acceptance. Study 3&#x2019;s improvement in multiple UEQ dimensions suggests that mirrors&#x2014;without being explicitly highlighted&#x2014;supported user presence (and likely embodiment). &#x201C;Attractiveness is a pure valence dimension&#x201D; [<xref ref-type="bibr" rid="ref46">46</xref>], that is, representative of the user&#x2019;s general experience. Attractiveness rose significantly in study 3, likely due to parallel improvements in perspicuity, dependability, and stimulation.</p><p>This connection is further supported by SUS findings. Significant improvements on items related to perceived realness&#x2014;questions 2 and 3 of the SUS questionnaire [<xref ref-type="bibr" rid="ref8">8</xref>]&#x2014;and memory of the experience indicate that participants recalled the virtual mirror scenario as immersive and believable. Given the high scores (Table S5 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>), this kind of recall implies a strong sense of presence [<xref ref-type="bibr" rid="ref39">39</xref>].</p><p>Taken together, study 3 suggests that the inclusion of virtual mirrors was a key turning point in enhancing iVR UX. While prior studies did not yield significant gains, mirrors may have &#x201C;unlocked&#x201D; the potential of the design. Whether these effects are solely due to mirrors or the cumulative impact of prior changes remains unclear, but no prior condition showed similar improvements.</p></sec><sec id="s4-4"><title>Summary of Therapeutic Outcomes (RQ2)</title><p>In study 2, participants exposed to customizable stylized avatars over two sessions showed a significant increase in self-compassion (SCS). 
This supports the notion that stylistic, customized avatars in iVR interactions may enhance therapeutic benefit. However, these gains were not observed in study 3, despite the enhanced UX. This suggests that while mirrors improve UX and presence, they may not directly translate to better therapeutic outcomes.</p><p>Nevertheless, qualitative feedback highlighted that participants valued the ability to create avatars resembling themselves. It was noted that this customization improved their emotional connection to the experience in stage 2 of the experiment. For example, &#x201C;customizing it to look like me did make the difference of how I perceived the audio replay&#x201D; (P60). This statement implies that appearance alone was insufficient; users needed a visual feedback loop (eg, mirrors) to fully benefit from avatar customization.</p><p>Interestingly, mirror placement was not emphasized to participants yet still yielded a significant impact. However, incorrect or unnatural mirror placement disrupted immersion. For example, &#x201C;[mirror] could be weird [to see] in the park environment&#x201D; (P64) or &#x201C;I see the mirror and then walk through it. I&#x2019;m [a] ghost and that makes me a little bit scar[ed]&#x201D; (P76). This underscores the importance of environmental congruence in the design of embodied iVR therapy.</p></sec><sec id="s4-5"><title>The Uncanny Valley</title><p>The appearance of stylized avatars was generally well received, supporting the idea that less realistic avatars can be effectively used in emotionally sensitive and immersive environments such as iVR self-compassion therapy, where avatar-owner resemblance is important. This finding aligns with prior research suggesting that stylized avatars may reduce the risk of UV effects while still fostering user connection [<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref48">48</xref>]. 
Participants responded positively to avatars with cartoonish or humanoid qualities. &#x201C;I don&#x2019;t recommend realistic avatars as much. I think the more cartoonish appeal is better&#x201D; (P41).</p><p>The UV model suggests that entities close to human-likeness [<xref ref-type="bibr" rid="ref44">44</xref>], but imperfect, can cause discomfort. The use of stylized avatars in this study&#x2014;located near &#x201C;Humanoid Robot&#x201D; on the affinity line&#x2014;appears to have mitigated this risk. Mirrors further supported predictability and safety (dependability), suggesting that this combination may serve as a UX-enhancing strategy to avoid UV interference.</p><p>That said, UV was not the primary aim of this study but a theoretical background for design rationale. However, these reflections are offered as considerations for future iVR therapy design. It is important to note that violations of physical logic (eg, walking through mirrors, unnatural placement) also negatively impacted immersion and should be carefully addressed in future iterations.</p></sec><sec id="s4-6"><title>Implications and Design Reflections</title><p>This study shows that stylized avatars, when paired with suitable UX enhancements such as virtual mirrors and avatar customization (along with other individualization concepts, environment selection, companion avatars, emotional behavior, etc), can offer stronger UX without losing therapeutic benefit, which might be an assumed consequence of selecting a lesser fidelity avatar. Pursuing hyperrealistic avatars without considering context may be counterproductive, as this ignores the Sisyphus-like limitation that avatars will risk falling back into the UV when realism is applied without careful design intent. 
This study gathers evidence to suggest that lower-fidelity avatars can be more appropriate in therapeutic settings but also that context matters when selecting avatar designs.</p></sec><sec id="s4-7"><title>Limitations</title><p>This study used the SCS&#x2014;a validated tool&#x2014;but it has known psychometric limitations [<xref ref-type="bibr" rid="ref49">49</xref>]. Additionally, participants were not filtered based on PHQ-8 scores; however, 74% had mild or higher range depression results. Future study iterations should include codesign and testing with clinical populations.</p><p>While the fidelity comparison was carefully controlled across studies, data were collected at different time points, which introduces the possibility of unmeasured societal or contextual differences. Future studies should aim to run control and experimental conditions concurrently to further minimize potential time-related confounds. Also, avatar fidelity was classified using a binary framework [<xref ref-type="bibr" rid="ref36">36</xref>]. Future research may benefit from applying more continuous fidelity measures&#x2014;such as user-rated realism scales or quantitative visual complexity metrics&#x2014;to better capture subtle design differences.</p><p>Mirror engagement was inferred rather than directly measured, as the Meta Quest 2 headset lacks built-in eye tracking. However, mirrors were centrally positioned, and qualitative responses strongly suggest that participants did engage with them.</p><p>Finally, some tests were underpowered, which limited the ability to detect small-to-moderate effects. To help interpret these findings and provide additional insight, qualitative methods were included to support quantitative results.</p></sec><sec id="s4-8"><title>Conclusion</title><p>If compassion means &#x201C;to suffer together&#x201D; [<xref ref-type="bibr" rid="ref50">50</xref>], then does engaging in self-compassion mean suffering alone? Perhaps not with these avatars. 
In this iVR intervention, participants were comforted by their own stylized, customized avatars within immersive environments enhanced by virtual mirrors. Participants experienced self-compassion in a space tailored to their preferences, offering new insights into how avatar design and environmental cues shape therapeutic experiences.</p><p>This study sequentially investigated whether stylized avatars, avatar customization, and virtual mirrors influence UX and therapeutic outcomes in self-compassion VR therapy. Across three studies, results demonstrated that stylized avatars were well accepted and did not diminish self-compassion outcomes. Avatar customization improved self-compassion scores in one study, while virtual mirrors significantly improved UX across multiple dimensions. However, enhanced UX alone did not guarantee stronger therapeutic outcomes.</p><p>These findings suggest that individualized avatars and virtual mirrors can meaningfully enhance iVR experiences without triggering UV effects. More broadly, this study supports the feasibility of lower-fidelity avatars in mental health VR applications. Future research should explore these design strategies in clinical populations to better understand avatar-based therapy interventions.</p></sec></sec></body><back><ack><p>The authors acknowledge Swaraj Vishwas Randhir and Vibhav Chitale for their contributions to the development of the individualized virtual reality environment and provision of the previous virtual environment. The authors would also like to acknowledge Lachlan Greig for providing consultation on psychological survey analysis and Chaitanya Dasi Luanzon for providing imagery. Generative artificial intelligence (ChatGPT; OpenAI) was used for grammar and language editing assistance during manuscript preparation. All content was reviewed and edited by the authors for accuracy and clarity. This research received no external funding. The authors conducted the study independently. 
A portion of the findings from studies 1 and 2 has been accepted for presentation at the Mixed/Augmented Reality for Mental Health workshop, part of the IEEE International Symposium on Mixed and Augmented Reality&#x2013;Adjunct, as &#x201C;Customizing Stylized Avatars for Self-Compassion: Design Impacts in Virtual Reality Therapy&#x201D; [<xref ref-type="bibr" rid="ref50">50</xref>]. The present manuscript extends this work to include the complete set of three studies with expanded therapeutic framing, detailed methodology, and integrated qualitative findings.</p></ack><notes><sec><title>Data Availability</title><p>Additional materials may be made available upon reasonable request to the corresponding author.</p></sec></notes><fn-group><fn fn-type="conflict"><p>None declared.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">CFT</term><def><p>compassion-focused therapy</p></def></def-item><def-item><term id="abb2">iVR</term><def><p>individualized virtual reality</p></def></def-item><def-item><term id="abb3">PHQ-8</term><def><p>8-item Patient Health Questionnaire</p></def></def-item><def-item><term id="abb4">SCS</term><def><p>Self-Compassion Scale</p></def></def-item><def-item><term id="abb5">STROBE</term><def><p>Strengthening the Reporting of Observational Studies in Epidemiology</p></def></def-item><def-item><term id="abb6">SUS</term><def><p>Slater-Usoh-Steed</p></def></def-item><def-item><term id="abb7">UEQ</term><def><p>User Experience Questionnaire</p></def></def-item><def-item><term id="abb8">UV</term><def><p>uncanny valley</p></def></def-item><def-item><term id="abb9">UX</term><def><p>user experience</p></def></def-item><def-item><term id="abb10">VR</term><def><p>virtual reality</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Falconer</surname><given-names>CJ</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name><name name-style="western"><surname>Rovira</surname><given-names>A</given-names> </name><etal/></person-group><article-title>Embodying compassion: a virtual reality paradigm for overcoming excessive self-criticism</article-title><source>PLoS ONE</source><year>2014</year><volume>9</volume><issue>11</issue><fpage>e111933</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0111933</pub-id><pub-id pub-id-type="medline">25389766</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Baghaei</surname><given-names>N</given-names> </name><name name-style="western"><surname>Stemmet</surname><given-names>L</given-names> </name><name name-style="western"><surname>Khaliq</surname><given-names>I</given-names> </name><etal/></person-group><article-title>Designing individualised virtual reality applications for supporting depression: a feasibility study</article-title><conf-name>EICS &#x2019;21: Companion of the 2021 ACM SIGCHI Symposium on Engineering Interactive Computing Systems</conf-name><conf-date>Jun 8-11, 2021</conf-date><pub-id pub-id-type="doi">10.1145/3459926.3464761</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Halim</surname><given-names>I</given-names> </name><name name-style="western"><surname>Stemmet</surname><given-names>L</given-names> </name><name name-style="western"><surname>Hach</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Individualized virtual reality for increasing self-compassion: evaluation study</article-title><source>JMIR Ment 
Health</source><year>2023</year><month>10</month><day>2</day><volume>10</volume><fpage>e47617</fpage><pub-id pub-id-type="doi">10.2196/47617</pub-id><pub-id pub-id-type="medline">37782537</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name><name name-style="western"><surname>Spanlang</surname><given-names>B</given-names> </name><name name-style="western"><surname>Sanchez-Vives</surname><given-names>MV</given-names> </name><name name-style="western"><surname>Blanke</surname><given-names>O</given-names> </name></person-group><article-title>First person experience of body transfer in virtual reality</article-title><source>PLoS ONE</source><year>2010</year><month>05</month><day>12</day><volume>5</volume><issue>5</issue><fpage>e10564</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0010564</pub-id><pub-id pub-id-type="medline">20485681</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Blanke</surname><given-names>O</given-names> </name><name name-style="western"><surname>Metzinger</surname><given-names>T</given-names> </name></person-group><article-title>Full-body illusions and minimal phenomenal selfhood</article-title><source>Trends Cogn Sci</source><year>2009</year><month>01</month><volume>13</volume><issue>1</issue><fpage>7</fpage><lpage>13</lpage><pub-id pub-id-type="doi">10.1016/j.tics.2008.10.003</pub-id><pub-id pub-id-type="medline">19058991</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Banakou</surname><given-names>D</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> 
</name></person-group><article-title>Embodiment in a virtual body that speaks produces agency over the speaking but does not necessarily influence subsequent real speaking</article-title><source>Sci Rep</source><year>2017</year><month>10</month><day>27</day><volume>7</volume><issue>1</issue><fpage>14227</fpage><pub-id pub-id-type="doi">10.1038/s41598-017-14620-5</pub-id><pub-id pub-id-type="medline">29079802</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>A note on presence terminology</article-title><source>UCL Computer Science</source><year>2003</year><access-date>2025-09-26</access-date><comment><ext-link ext-link-type="uri" xlink:href="http://www0.cs.ucl.ac.uk/research/vr/Projects/Presencia/ConsortiumPublications/ucl_cs_papers/presence-terminology.htm">http://www0.cs.ucl.ac.uk/research/vr/Projects/Presencia/ConsortiumPublications/ucl_cs_papers/presence-terminology.htm</ext-link></comment></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name><name name-style="western"><surname>Usoh</surname><given-names>M</given-names> </name><name name-style="western"><surname>Steed</surname><given-names>A</given-names> </name></person-group><article-title>Depth of presence in virtual environments</article-title><source>Presence Teleoperators Virtual Environ</source><year>1994</year><month>01</month><volume>3</volume><issue>2</issue><fpage>130</fpage><lpage>144</lpage><pub-id pub-id-type="doi">10.1162/pres.1994.3.2.130</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Grassini</surname><given-names>S</given-names> </name><name name-style="western"><surname>Laumann</surname><given-names>K</given-names> </name><name name-style="western"><surname>Rasmussen Skogstad</surname><given-names>M</given-names> </name></person-group><article-title>The use of virtual reality alone does not promote training performance (but sense of presence does)</article-title><source>Front Psychol</source><year>2020</year><volume>11</volume><fpage>1743</fpage><pub-id pub-id-type="doi">10.3389/fpsyg.2020.01743</pub-id><pub-id pub-id-type="medline">32765384</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wirth</surname><given-names>W</given-names> </name><name name-style="western"><surname>Hartmann</surname><given-names>T</given-names> </name><name name-style="western"><surname>B&#x00F6;cking</surname><given-names>S</given-names> </name><etal/></person-group><article-title>A process model of the formation of spatial presence experiences</article-title><source>Media Psychol</source><year>2007</year><month>05</month><day>15</day><volume>9</volume><issue>3</issue><fpage>493</fpage><lpage>525</lpage><pub-id pub-id-type="doi">10.1080/15213260701283079</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sanchez-Vives</surname><given-names>MV</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>From presence to consciousness through virtual reality</article-title><source>Nat Rev Neurosci</source><year>2005</year><month>04</month><volume>6</volume><issue>4</issue><fpage>332</fpage><lpage>339</lpage><pub-id pub-id-type="doi">10.1038/nrn1651</pub-id><pub-id 
pub-id-type="medline">15803164</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Falconer</surname><given-names>CJ</given-names> </name><name name-style="western"><surname>Rovira</surname><given-names>A</given-names> </name><name name-style="western"><surname>King</surname><given-names>JA</given-names> </name><etal/></person-group><article-title>Embodying self-compassion within virtual reality and its effects on patients with depression</article-title><source>BJPsych Open</source><year>2016</year><month>01</month><volume>2</volume><issue>1</issue><fpage>74</fpage><lpage>80</lpage><pub-id pub-id-type="doi">10.1192/bjpo.bp.115.002147</pub-id><pub-id pub-id-type="medline">27703757</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kilteni</surname><given-names>K</given-names> </name><name name-style="western"><surname>Groten</surname><given-names>R</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>The sense of embodiment in virtual reality</article-title><source>Presence Teleoperators Virtual Environ</source><year>2012</year><month>11</month><volume>21</volume><issue>4</issue><fpage>373</fpage><lpage>387</lpage><pub-id pub-id-type="doi">10.1162/PRES_a_00124</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name><name name-style="western"><surname>Wilbur</surname><given-names>S</given-names> </name></person-group><article-title>A framework for immersive virtual environments (FIVE): speculations on the role of presence in virtual 
environments</article-title><source>Presence Teleoperators Virtual Environ</source><year>1997</year><month>12</month><volume>6</volume><issue>6</issue><fpage>603</fpage><lpage>616</lpage><pub-id pub-id-type="doi">10.1162/pres.1997.6.6.603</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wu</surname><given-names>B</given-names> </name><name name-style="western"><surname>Yu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Gu</surname><given-names>X</given-names> </name></person-group><article-title>Effectiveness of immersive virtual reality using head&#x2010;mounted displays on learning performance: a meta&#x2010;analysis</article-title><source>Brit J Educational Tech</source><year>2020</year><month>11</month><volume>51</volume><issue>6</issue><fpage>1991</fpage><lpage>2005</lpage><pub-id pub-id-type="doi">10.1111/bjet.13023</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Du</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Song</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Jiang</surname><given-names>H</given-names> </name><name name-style="western"><surname>Wei</surname><given-names>X</given-names> </name><name name-style="western"><surname>Weng</surname><given-names>D</given-names> </name><name name-style="western"><surname>Fan</surname><given-names>M</given-names> </name></person-group><article-title>LightSword: a customized virtual reality exergame for long-term cognitive inhibition training in older adults</article-title><conf-name>CHI &#x2019;24: Proceedings of the 2024 CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>May 11-16, 2024</conf-date><conf-loc>Honolulu, HI</conf-loc><pub-id 
pub-id-type="doi">10.1145/3613904.3642187</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Cuthbert</surname><given-names>R</given-names> </name><name name-style="western"><surname>Turkay</surname><given-names>S</given-names> </name><name name-style="western"><surname>Brown</surname><given-names>R</given-names> </name></person-group><article-title>The effects of customisation on player experiences and motivation in a virtual reality game</article-title><conf-name>OZCHI&#x2019;19: 31st Australian Conference on Human-Computer-Interaction</conf-name><conf-date>Dec 2-5, 2019</conf-date><conf-loc>Fremantle, WA, Australia</conf-loc><pub-id pub-id-type="doi">10.1145/3369457.3369475</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Kao</surname><given-names>D</given-names> </name><name name-style="western"><surname>Ratan</surname><given-names>R</given-names> </name><name name-style="western"><surname>Mousas</surname><given-names>C</given-names> </name><name name-style="western"><surname>Joshi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Melcer</surname><given-names>EF</given-names> </name></person-group><article-title>Audio matters too: how audial avatar customization enhances visual avatar customization</article-title><conf-name>CHI&#x2019; 22: Proceedings of the 2022 CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>Apr 29 to May 5, 2022</conf-date><conf-loc>New Orleans, LA</conf-loc><pub-id pub-id-type="doi">10.1145/3491102.3501848</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Altarteer</surname><given-names>S</given-names> 
</name><name name-style="western"><surname>Vassilis</surname><given-names>C</given-names> </name><name name-style="western"><surname>Harrison</surname><given-names>D</given-names> </name></person-group><article-title>Product customisation: virtual reality and new opportunities for luxury brands online trading</article-title><conf-name>Web3D &#x2019;16: The 21st International Conference on Web3D Technology</conf-name><conf-date>Jul 22-24, 2016</conf-date><conf-loc>Anaheim, CA</conf-loc><pub-id pub-id-type="doi">10.1145/2945292.2945317</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>McArthur</surname><given-names>V</given-names> </name></person-group><article-title>The UX of avatar customization</article-title><conf-name>CHI &#x2019;17: CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>May 6-11, 2017</conf-date><conf-loc>Denver, CO</conf-loc><pub-id pub-id-type="doi">10.1145/3025453.3026020</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Buck</surname><given-names>L</given-names> </name><name name-style="western"><surname>Young</surname><given-names>GW</given-names> </name><name name-style="western"><surname>McDonnell</surname><given-names>R</given-names> </name></person-group><article-title>Avatar customization, personality, and the perception of work group inclusion in immersive virtual reality</article-title><conf-name>CSCW &#x2019;23: Computer Supported Cooperative Work and Social Computing</conf-name><conf-date>Oct 14-18, 2023</conf-date><conf-loc>Minneapolis, MN</conf-loc><pub-id pub-id-type="doi">10.1145/3584931.3606992</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Birk</surname><given-names>MV</given-names> </name><name name-style="western"><surname>Mandryk</surname><given-names>RL</given-names> </name></person-group><article-title>Combating attrition in digital self-improvement programs using avatar customization</article-title><conf-name>CHI &#x2019;18: CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>Apr 21-26, 2018</conf-date><conf-loc>Montreal, QC</conf-loc><pub-id pub-id-type="doi">10.1145/3173574.3174234</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Fitton</surname><given-names>I</given-names> </name><name name-style="western"><surname>Clarke</surname><given-names>C</given-names> </name><name name-style="western"><surname>Dalton</surname><given-names>J</given-names> </name><name name-style="western"><surname>Proulx</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>Lutteroth</surname><given-names>C</given-names> </name></person-group><article-title>Dancing with the avatars: minimal avatar customisation enhances learning in a psychomotor task</article-title><conf-name>CHI &#x2019;23: CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>Apr 23-28, 2023</conf-date><conf-loc>Hamburg, Germany</conf-loc><pub-id pub-id-type="doi">10.1145/3544548.3580944</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Freeman</surname><given-names>G</given-names> </name><name name-style="western"><surname>Zamanifard</surname><given-names>S</given-names> </name><name name-style="western"><surname>Maloney</surname><given-names>D</given-names> </name><name name-style="western"><surname>Adkins</surname><given-names>A</given-names> </name></person-group><article-title>My body, my 
avatar: how people perceive their avatars in social virtual reality</article-title><conf-name>CHI &#x2019;20: CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>Apr 25-30, 2020</conf-date><conf-loc>Honolulu, HI</conf-loc><pub-id pub-id-type="doi">10.1145/3334480.3382923</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Waltemate</surname><given-names>T</given-names> </name><name name-style="western"><surname>Gall</surname><given-names>D</given-names> </name><name name-style="western"><surname>Roth</surname><given-names>D</given-names> </name><name name-style="western"><surname>Botsch</surname><given-names>M</given-names> </name><name name-style="western"><surname>Latoschik</surname><given-names>ME</given-names> </name></person-group><article-title>The impact of avatar personalization and immersion on virtual body ownership, presence, and emotional response</article-title><source>IEEE Trans Vis Comput Graph</source><year>2018</year><month>04</month><volume>24</volume><issue>4</issue><fpage>1643</fpage><lpage>1652</lpage><pub-id pub-id-type="doi">10.1109/TVCG.2018.2794629</pub-id><pub-id pub-id-type="medline">29543180</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tajadura-Jim&#x00E9;nez</surname><given-names>A</given-names> </name><name name-style="western"><surname>Banakou</surname><given-names>D</given-names> </name><name name-style="western"><surname>Bianchi-Berthouze</surname><given-names>N</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>Embodiment in a child-like talking virtual body influences object size perception, self-identification, and subsequent real speaking</article-title><source>Sci 
Rep</source><year>2017</year><month>08</month><day>29</day><volume>7</volume><issue>1</issue><fpage>9637</fpage><pub-id pub-id-type="doi">10.1038/s41598-017-09497-3</pub-id><pub-id pub-id-type="medline">28851953</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sanchez-Vives</surname><given-names>MV</given-names> </name><name name-style="western"><surname>Spanlang</surname><given-names>B</given-names> </name><name name-style="western"><surname>Frisoli</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bergamasco</surname><given-names>M</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>Virtual hand illusion induced by visuomotor correlations</article-title><source>PLoS ONE</source><year>2010</year><month>04</month><day>29</day><volume>5</volume><issue>4</issue><fpage>e10381</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0010381</pub-id><pub-id pub-id-type="medline">20454463</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tsakiris</surname><given-names>M</given-names> </name><name name-style="western"><surname>Haggard</surname><given-names>P</given-names> </name></person-group><article-title>The rubber hand illusion revisited: visuotactile integration and self-attribution</article-title><source>J Exp Psychol Hum Percept Perform</source><year>2005</year><month>02</month><volume>31</volume><issue>1</issue><fpage>80</fpage><lpage>91</lpage><pub-id pub-id-type="doi">10.1037/0096-1523.31.1.80</pub-id><pub-id pub-id-type="medline">15709864</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Inoue</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Kitazaki</surname><given-names>M</given-names> </name></person-group><article-title>Virtual mirror and beyond: the psychological basis for avatar embodiment via a mirror</article-title><source>J Robotics Mechatronics</source><year>2021</year><month>10</month><day>20</day><volume>33</volume><issue>5</issue><fpage>1004</fpage><lpage>1012</lpage><pub-id pub-id-type="doi">10.20965/jrm.2021.p1004</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Banakou</surname><given-names>D</given-names> </name><name name-style="western"><surname>Groten</surname><given-names>R</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>Illusory ownership of a virtual child body causes overestimation of object sizes and implicit attitude changes</article-title><source>Proc Natl Acad Sci U S A</source><year>2013</year><month>07</month><day>30</day><volume>110</volume><issue>31</issue><fpage>12846</fpage><lpage>12851</lpage><pub-id pub-id-type="doi">10.1073/pnas.1306779110</pub-id><pub-id pub-id-type="medline">23858436</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gonzalez-Franco</surname><given-names>M</given-names> </name><name name-style="western"><surname>Perez-Marcos</surname><given-names>D</given-names> </name><name name-style="western"><surname>Spanlang</surname><given-names>B</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>The contribution of real-time mirror reflections of motor actions on virtual body ownership in an immersive virtual 
environment</article-title><conf-name>2010 IEEE Virtual Reality Conference (VR)</conf-name><conf-date>Mar 20-24, 2010</conf-date><conf-loc>Boston, MA</conf-loc><pub-id pub-id-type="doi">10.1109/VR.2010.5444805</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name><name name-style="western"><surname>Neyret</surname><given-names>S</given-names> </name><name name-style="western"><surname>Johnston</surname><given-names>T</given-names> </name><etal/></person-group><article-title>An experimental study of a virtual reality counselling paradigm using embodied self-dialogue</article-title><source>Sci Rep</source><year>2019</year><month>07</month><day>29</day><volume>9</volume><issue>1</issue><fpage>10903</fpage><pub-id pub-id-type="doi">10.1038/s41598-019-46877-3</pub-id><pub-id pub-id-type="medline">31358846</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mal</surname><given-names>D</given-names> </name><name name-style="western"><surname>Wolf</surname><given-names>E</given-names> </name><name name-style="western"><surname>Dollinger</surname><given-names>N</given-names> </name><name name-style="western"><surname>Wienrich</surname><given-names>C</given-names> </name><name name-style="western"><surname>Latoschik</surname><given-names>ME</given-names> </name></person-group><article-title>The impact of avatar and environment congruence on plausibility, embodiment, presence, and the proteus effect in virtual reality</article-title><source>IEEE Trans Vis Comput Graph</source><year>2023</year><month>05</month><volume>29</volume><issue>5</issue><fpage>2358</fpage><lpage>2368</lpage><pub-id pub-id-type="doi">10.1109/TVCG.2023.3247089</pub-id><pub-id 
pub-id-type="medline">37027615</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wilson</surname><given-names>AC</given-names> </name><name name-style="western"><surname>Mackintosh</surname><given-names>K</given-names> </name><name name-style="western"><surname>Power</surname><given-names>K</given-names> </name><name name-style="western"><surname>Chan</surname><given-names>SWY</given-names> </name></person-group><article-title>Effectiveness of self-compassion related therapies: a systematic review and meta-analysis</article-title><source>Mindfulness (N Y)</source><year>2019</year><month>06</month><volume>10</volume><issue>6</issue><fpage>979</fpage><lpage>995</lpage><pub-id pub-id-type="doi">10.1007/s12671-018-1037-6</pub-id><pub-id pub-id-type="medline">31354877</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Germer</surname><given-names>CK</given-names> </name><name name-style="western"><surname>Neff</surname><given-names>KD</given-names> </name></person-group><article-title>Self-compassion in clinical practice</article-title><source>J Clin Psychol</source><year>2013</year><month>08</month><volume>69</volume><issue>8</issue><fpage>856</fpage><lpage>867</lpage><pub-id pub-id-type="doi">10.1002/jclp.22021</pub-id><pub-id pub-id-type="medline">23775511</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Weidner</surname><given-names>F</given-names> </name><name name-style="western"><surname>Boettcher</surname><given-names>G</given-names> </name><name name-style="western"><surname>Arboleda</surname><given-names>SA</given-names> </name><etal/></person-group><article-title>A systematic review on 
the visualization of avatars and agents in AR &#x0026; VR displayed using head-mounted displays</article-title><source>IEEE Trans Vis Comput Graph</source><year>2023</year><month>05</month><volume>29</volume><issue>5</issue><fpage>2596</fpage><lpage>2606</lpage><pub-id pub-id-type="doi">10.1109/TVCG.2023.3247072</pub-id><pub-id pub-id-type="medline">37027741</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Neff</surname><given-names>KD</given-names> </name></person-group><article-title>The self-compassion scale is a valid and theoretically coherent measure of self-compassion</article-title><source>Mindfulness (N Y)</source><year>2016</year><month>02</month><volume>7</volume><issue>1</issue><fpage>264</fpage><lpage>274</lpage><pub-id pub-id-type="doi">10.1007/s12671-015-0479-3</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kroenke</surname><given-names>K</given-names> </name><name name-style="western"><surname>Strine</surname><given-names>TW</given-names> </name><name name-style="western"><surname>Spitzer</surname><given-names>RL</given-names> </name><name name-style="western"><surname>Williams</surname><given-names>JBW</given-names> </name><name name-style="western"><surname>Berry</surname><given-names>JT</given-names> </name><name name-style="western"><surname>Mokdad</surname><given-names>AH</given-names> </name></person-group><article-title>The PHQ-8 as a measure of current depression in the general population</article-title><source>J Affect Disord</source><year>2009</year><month>04</month><volume>114</volume><issue>1-3</issue><fpage>163</fpage><lpage>173</lpage><pub-id pub-id-type="doi">10.1016/j.jad.2008.06.026</pub-id><pub-id pub-id-type="medline">18752852</pub-id></nlm-citation></ref><ref 
id="ref39"><label>39</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Usoh</surname><given-names>M</given-names> </name><name name-style="western"><surname>Catena</surname><given-names>E</given-names> </name><name name-style="western"><surname>Arman</surname><given-names>S</given-names> </name><name name-style="western"><surname>Slater</surname><given-names>M</given-names> </name></person-group><article-title>Using presence questionnaires in reality</article-title><source>Presence Teleoperators Virtual Environ</source><year>2000</year><month>10</month><volume>9</volume><issue>5</issue><fpage>497</fpage><lpage>503</lpage><pub-id pub-id-type="doi">10.1162/105474600566989</pub-id></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Braun</surname><given-names>V</given-names> </name><name name-style="western"><surname>Clarke</surname><given-names>V</given-names> </name></person-group><article-title>Using thematic analysis in psychology</article-title><source>Qual Res Psychol</source><year>2006</year><month>01</month><volume>3</volume><issue>2</issue><fpage>77</fpage><lpage>101</lpage><pub-id pub-id-type="doi">10.1191/1478088706qp063oa</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="web"><article-title>Stylized customizable avatars</article-title><source>Sunbox Games</source><access-date>2025-09-17</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://sunbox.games/asset/stylized-customizable-avatars/">https://sunbox.games/asset/stylized-customizable-avatars/</ext-link></comment></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>van Gisbergen</surname><given-names>MS</given-names> </name><name 
name-style="western"><surname>Sensagir</surname><given-names>I</given-names> </name><name name-style="western"><surname>Relouw</surname><given-names>J</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Jung</surname><given-names>T</given-names> </name><name name-style="western"><surname>tom Dieck</surname><given-names>MC</given-names> </name><name name-style="western"><surname>Rauschnabel</surname><given-names>PA</given-names> </name></person-group><article-title>How real do you see yourself in VR? The effect of user-avatar resemblance on virtual reality experiences and behaviour</article-title><source>Augmented Reality and Virtual Reality: Changing Realities in a Dynamic World</source><year>2020</year><fpage>401</fpage><lpage>409</lpage><pub-id pub-id-type="doi">10.1007/978-3-030-37869-1_32</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lugrin</surname><given-names>JL</given-names> </name><name name-style="western"><surname>Latt</surname><given-names>J</given-names> </name><name name-style="western"><surname>Latoschik</surname><given-names>ME</given-names> </name></person-group><article-title>Avatar anthropomorphism and illusion of body ownership in VR</article-title><conf-name>2015 IEEE Virtual Reality (VR)</conf-name><conf-date>Mar 23-27, 2015</conf-date><conf-loc>Arles, France</conf-loc><pub-id pub-id-type="doi">10.1109/VR.2015.7223379</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mori</surname><given-names>M</given-names> </name><name name-style="western"><surname>MacDorman</surname><given-names>KF</given-names> </name><name name-style="western"><surname>Kageki</surname><given-names>N</given-names> </name></person-group><article-title>The 
uncanny valley [from the field]</article-title><source>IEEE Robotics &#x0026; Automation Magazine</source><year>2012</year><month>06</month><volume>19</volume><issue>2</issue><fpage>98</fpage><lpage>100</lpage><pub-id pub-id-type="doi">10.1109/MRA.2012.2192811</pub-id></nlm-citation></ref><ref id="ref45"><label>45</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Becker-Asano</surname><given-names>C</given-names> </name><name name-style="western"><surname>Ogawa</surname><given-names>K</given-names> </name><name name-style="western"><surname>Nishio</surname><given-names>S</given-names> </name><name name-style="western"><surname>Ishiguro</surname><given-names>H</given-names> </name></person-group><article-title>Exploring the uncanny valley with Geminoid HI-1 in a real-world application</article-title><access-date>2025-09-26</access-date><conf-name>IADIS International Conference Interfaces and Human Computer Interaction</conf-name><conf-date>Jul 26-30, 2010</conf-date><conf-loc>Freiburg, Germany</conf-loc><comment><ext-link ext-link-type="uri" xlink:href="https://www.researchgate.net/profile/Christian_Becker-Asano/publication/229059888_Exploring_the_uncanny_valley_with_Geminoid_HI-1_in_a_real-world_application/links/0912f50b5da8f939c9000000.pdf">https://www.researchgate.net/profile/Christian_Becker-Asano/publication/229059888_Exploring_the_uncanny_valley_with_Geminoid_HI-1_in_a_real-world_application/links/0912f50b5da8f939c9000000.pdf</ext-link></comment></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Schrepp</surname><given-names>M</given-names> </name><name name-style="western"><surname>Hinderks</surname><given-names>A</given-names> </name><name name-style="western"><surname>Thomaschewski</surname><given-names>J</given-names> </name></person-group><article-title>Construction of a 
benchmark for the User Experience Questionnaire (UEQ)</article-title><source>Int J Interactive Multimedia Artif Intelligence</source><year>2017</year><volume>4</volume><issue>4</issue><fpage>40</fpage><pub-id pub-id-type="doi">10.9781/ijimai.2017.445</pub-id></nlm-citation></ref><ref id="ref47"><label>47</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Neff</surname><given-names>KD</given-names> </name><name name-style="western"><surname>T&#x00F3;th-Kir&#x00E1;ly</surname><given-names>I</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Medvedev</surname><given-names>ON</given-names> </name><name name-style="western"><surname>Kr&#x00E4;geloh</surname><given-names>CU</given-names> </name><name name-style="western"><surname>Siegert</surname><given-names>RJ</given-names> </name><name name-style="western"><surname>Singh</surname><given-names>NN</given-names> </name></person-group><article-title>Self-Compassion Scale (SCS)</article-title><source>Handbook of Assessment in Mindfulness Research</source><year>2022</year><publisher-name>Springer</publisher-name><fpage>1</fpage><lpage>22</lpage><pub-id pub-id-type="doi">10.1007/978-3-030-77644-2_36-1</pub-id></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>McMahan</surname><given-names>RP</given-names> </name><name name-style="western"><surname>Lai</surname><given-names>C</given-names> </name><name name-style="western"><surname>Pal</surname><given-names>SK</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Lackey</surname><given-names>S</given-names> </name><name name-style="western"><surname>Shumaker</surname><given-names>R</given-names> </name></person-group><article-title>Interaction fidelity: the uncanny valley of 
virtual reality interactions</article-title><source>Virtual, Augmented and Mixed Reality: 8th International Conference, VAMR 2016, Held as Part of HCI International 2016, Toronto, Canada, July 17-22, 2016 Proceedings</source><year>2016</year><publisher-name>Springer</publisher-name><pub-id pub-id-type="doi">10.1007/978-3-319-39907-2_6</pub-id></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Muris</surname><given-names>P</given-names> </name><name name-style="western"><surname>Otgaar</surname><given-names>H</given-names> </name></person-group><article-title>The process of science: a critical evaluation of more than 15 years of research on self-compassion with the self-compassion scale</article-title><source>Mindfulness (N Y)</source><year>2020</year><month>06</month><volume>11</volume><issue>6</issue><fpage>1469</fpage><lpage>1482</lpage><pub-id pub-id-type="doi">10.1007/s12671-020-01363-0</pub-id></nlm-citation></ref><ref id="ref50"><label>50</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Elliott</surname><given-names>TC</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Knibbe</surname><given-names>J</given-names> </name><name name-style="western"><surname>Henry</surname><given-names>JD</given-names> </name><name name-style="western"><surname>Baghaei</surname><given-names>N</given-names> </name></person-group><article-title>Customizing stylized avatars for self-compassion: design impacts in virtual reality therapy (forthcoming)</article-title><conf-name>MARMH&#x2019;25: Proceedings of the Mixed/Augmented Reality for Mental Health, Workshop, IEEE International Symposium on Mixed and Augmented Reality &#x2013; Adjunct (ISMAR 2025)</conf-name><conf-date>Oct 8-12, 
2025</conf-date><conf-loc>Daejeon, South Korea</conf-loc></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Tables presenting additional quantitative and qualitative outcomes across studies 1-3.</p><media xlink:href="formative_v9i1e71004_app1.docx" xlink:title="DOCX File, 32 KB"/></supplementary-material></app-group></back></article>