<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JFR</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Form Res</journal-id>
      <journal-title>JMIR Formative Research</journal-title>
      <issn pub-type="epub">2561-326X</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v9i1e79280</article-id>
      <article-id pub-id-type="pmid">41187325</article-id>
      <article-id pub-id-type="doi">10.2196/79280</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Quantitative Assessment of Strabismus Using Cloud AI Computing: Validation Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Sarvestan</surname>
            <given-names>Javad</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Abegg</surname>
            <given-names>Mathias</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Norouzpour</surname>
            <given-names>Amir</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>He</surname>
            <given-names>Junxian</given-names>
          </name>
          <degrees>BE</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0009-0003-9262-9219</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" equal-contrib="yes">
          <name name-style="western">
            <surname>Zhang</surname>
            <given-names>Jiawei</given-names>
          </name>
          <degrees>BMed</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-2068-5049</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Wang</surname>
            <given-names>Zheng</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-3497-1361</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Pundlik</surname>
            <given-names>Shrinivas</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-8766-7112</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Liu</surname>
            <given-names>Rui</given-names>
          </name>
          <degrees>MD, PhD</degrees>
          <xref rid="aff3" ref-type="aff">3</xref>
          <address>
            <institution>Department of Ophthalmology</institution>
            <institution>Eye &#38; ENT Hospital of Fudan University</institution>
            <addr-line>83 Fenyang Road, Xuhui District</addr-line>
            <addr-line>Shanghai, 200031</addr-line>
            <country>China</country>
            <phone>86 13764064088</phone>
            <email>lratb1@aliyun.com</email>
          </address>
          <xref rid="aff5" ref-type="aff">5</xref>
          <xref rid="aff6" ref-type="aff">6</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4092-2339</ext-link>
        </contrib>
        <contrib id="contrib6" contrib-type="author">
          <name name-style="western">
            <surname>Luo</surname>
            <given-names>Gang</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-0623-6236</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Schepens Eye Research Institute of Mass Eye &#38; Ear</institution>
        <institution>Harvard Medical School</institution>
        <addr-line>Boston, MA</addr-line>
        <country>United States</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Chongqing University</institution>
        <addr-line>Chongqing</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Ophthalmology</institution>
        <institution>Eye &#38; ENT Hospital of Fudan University</institution>
        <addr-line>Shanghai</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>School of Medicine</institution>
        <institution>Jiaxing University</institution>
        <addr-line>Jiaxing</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff5">
        <label>5</label>
        <institution>Key Laboratory of Myopia and Related Eye Diseases</institution>
        <institution>Fudan University</institution>
        <addr-line>Shanghai</addr-line>
        <country>China</country>
      </aff>
      <aff id="aff6">
        <label>6</label>
        <institution>Shanghai Key Laboratory of Visual Impairment and Restoration</institution>
        <addr-line>Shanghai</addr-line>
        <country>China</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Rui Liu <email>lratb1@aliyun.com</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2025</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>4</day>
        <month>11</month>
        <year>2025</year>
      </pub-date>
      <volume>9</volume>
      <elocation-id>e79280</elocation-id>
      <history>
        <date date-type="received">
          <day>19</day>
          <month>6</month>
          <year>2025</year>
        </date>
        <date date-type="rev-request">
          <day>8</day>
          <month>8</month>
          <year>2025</year>
        </date>
        <date date-type="rev-recd">
          <day>9</day>
          <month>9</month>
          <year>2025</year>
        </date>
        <date date-type="accepted">
          <day>2</day>
          <month>10</month>
          <year>2025</year>
        </date>
      </history>
      <copyright-statement>©Junxian He, Jiawei Zhang, Zheng Wang, Shrinivas Pundlik, Rui Liu, Gang Luo. Originally published in JMIR Formative Research (https://formative.jmir.org), 04.11.2025.</copyright-statement>
      <copyright-year>2025</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Formative Research, is properly cited. The complete bibliographic information, a link to the original publication on https://formative.jmir.org, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://formative.jmir.org/2025/1/e79280" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Strabismus measurement is essential in vision assessment and screening. It typically requires skilled clinicians or specialized equipment. Photographic strabismus measurement methods have value in terms of accessibility and convenience of use.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>This study aimed to evaluate Eyeturn Cloud, a cloud-based artificial intelligence (AI) system for measuring strabismus angles based on eye images captured with smartphone cameras under cover test conditions.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>The Eyeturn Cloud web app uses AI models to recognize the eyes, eyelids, and iris, and then to segment the iris precisely. It then computes strabismus based on ellipse fitting of the iris boundary and corneal reflection. The system was evaluated in patients (without glasses) with manifest strabismus and control participants. Clinicians measured eye deviations using the prism alternate cover test and also captured pictures of their eyes under alternate cover and unilateral cover conditions. The pictures were processed by Eyeturn Cloud.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>In total, 79 (mean age 11.9, SD 6.3 years; esotropia: n=15, exotropia: n=55, orthotropia: n=9) participants were enrolled, of whom data were available for 71 participants (8/79, 10.1% processing failure). The range of prism alternate cover test strabismus magnitude was from 78 base in to 78 base out prism diopters (PDs). A strong correlation was found between Eyeturn Cloud and clinical measurements (<italic>R</italic><sup>2</sup>=0.95; slope=0.91; <italic>P</italic>&#60;.001). Bland-Altman analysis revealed that 95% limits of agreement between the 2 measurements were –20.2 to 14.6 PD. A repeatability test with 15 participants (4 photos each) found a 1.53 PD SD.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>The cloud AI web app can compute strabismus angles reliably under alternate and unilateral cover conditions in clinical settings, and its potential for use in telehealth settings needs further evaluation.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>strabismus</kwd>
        <kwd>cloud computing</kwd>
        <kwd>AI models</kwd>
        <kwd>artificial intelligence</kwd>
        <kwd>smartphone</kwd>
        <kwd>web app</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Strabismus, a condition where the eyes are misaligned, can cause a wide range of visual dysfunctions in children [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>] and adults [<xref ref-type="bibr" rid="ref3">3</xref>]. According to the survey [<xref ref-type="bibr" rid="ref4">4</xref>], strabismus not only negatively impacts activities of daily living but also causes self-concerns regarding appearance to others, even in most patients without diplopia. Current clinical assessments of strabismus are based on some form of cover testing with prisms to determine the presence and the magnitude of eye deviation. This procedure requires skilled and experienced clinicians using instruments such as a prism bar or synoptophore and also a high degree of cooperation from the patients for accurate assessments.</p>
      <p>With the development of digital health care technology, various approaches have been developed for automating or simplifying the strabismus measurement process, including eye tracker–based systems [<xref ref-type="bibr" rid="ref5">5</xref>-<xref ref-type="bibr" rid="ref7">7</xref>], virtual reality–based systems [<xref ref-type="bibr" rid="ref8">8</xref>], and smart glasses [<xref ref-type="bibr" rid="ref9">9</xref>]. For vision screening purposes [<xref ref-type="bibr" rid="ref10">10</xref>], some commercial screening devices such as Volk Eye Check, Spot Vision Screener, Plusoptix, iScreen, 2WIN, GazeLab [<xref ref-type="bibr" rid="ref11">11</xref>], neos [<xref ref-type="bibr" rid="ref12">12</xref>], and blinq are available. These devices are typically built on optoelectronic technologies and are relatively easier to use than conventional prisms. Some, for example, the blinq device, have shown very high sensitivity and specificity for strabismus detection [<xref ref-type="bibr" rid="ref13">13</xref>]. However, these devices typically only raise red flags when concerning strabismus is detected and generally do not provide quantitative measures (with some exceptions, eg, 2WIN).</p>
      <p>For the reasons of costs, accessibility, and sometimes operating difficulties, dedicated screening devices are not available in many settings, such as resource-limited regions, nursing homes, and in telehealth scenarios [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref15">15</xref>]. In these settings, photographic methods, which just need a picture of patients’ eyes, may be preferable.</p>
      <p>Approaches to measure strabismus based on a photograph include using dedicated cameras [<xref ref-type="bibr" rid="ref16">16</xref>-<xref ref-type="bibr" rid="ref18">18</xref>], or more recently, using a smartphone camera and flash to take a picture of the eyes and detect strabismus [<xref ref-type="bibr" rid="ref19">19</xref>-<xref ref-type="bibr" rid="ref24">24</xref>]. Other automated systems have used different technologies, such as virtual reality headsets or dedicated infrared eye trackers [<xref ref-type="bibr" rid="ref25">25</xref>,<xref ref-type="bibr" rid="ref26">26</xref>]. Strabismus assessment based on captured photos using web tools [<xref ref-type="bibr" rid="ref27">27</xref>] and artificial intelligence (AI) models [<xref ref-type="bibr" rid="ref28">28</xref>] has been developed. Photographic assessment of strabismus, via mobile apps, cloud computing, and AI algorithms, offers the advantages of accessibility and convenience.</p>
      <p>Most photoscreening devices, including smartphone apps, are limited to measuring manifested strabismus, and patients with intermittent strabismus are likely to be missed. The ability to elicit and capture the eye movement based on covering 1 or both eyes alternatingly remains a challenging problem for mobile apps and many other photoscreening devices. In fact, in our previous study of the Eyeturn app [<xref ref-type="bibr" rid="ref19">19</xref>], a stand-alone photographic strabismus measurement tool, we indicated that the cover or uncover feature within the app required offline processing of the videos. In addition to the inability to effectively measure strabismus under cover or uncover or alternating cover (AC) mode, limitations of the prior stand-alone Eyeturn app include on‑device model constraints (only lightweight models capable of running on mobile hardware could be used), uncorrected corneal‑reflex or iris detection errors and lack of quality control, device variability that caused software mismatch with hardware, and inability to reprocess previously saved images (the app only worked with live camera capture).</p>
      <p>To overcome this challenge, we have developed a smartphone app (cover test app) to enable automatic picture capture under various cover or uncover conditions [<xref ref-type="bibr" rid="ref29">29</xref>] and an Eyeturn Cloud web app for strabismus angle calculation based on the uploaded pictures of the eyes. In this study, the cover test app is merely a facilitator for the precise capture of eye movements at the instant of uncovering (see demonstration video in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). The present cloud system addresses these gaps by using a different workflow for image capture in different cover test modes, which improves processing via various large computer vision models. The current cloud app can be used for retrospective reprocessing of archived images for recalculation and audit.</p>
      <p>By separating photo capturing and image analysis, the Eyeturn Cloud web app was developed with the ultimate goal of improving accessibility and reducing dependence on specialized equipment, dedicated apps, or skilled operators for strabismus assessment. While the cover test app may be convenient, it is not strictly necessary to capture images for uploading to Eyeturn Cloud. One can use regular digital cameras or native phone camera apps to take photos, if the goal is to assess manifested strabismus (cover is not needed). Used together with either regular phone camera app or our dedicated cover test app [<xref ref-type="bibr" rid="ref29">29</xref>], the proposed solution could be potentially used for vision screenings in resource-limited areas and for telehealth settings (such as virtual visits for prescreening or follow-up [<xref ref-type="bibr" rid="ref14">14</xref>] and for vision rehabilitation [<xref ref-type="bibr" rid="ref15">15</xref>]), where pictures taken by nonprofessional personnel could be uploaded for quantitative analysis and then reviewed by an ophthalmic expert at remote sites.</p>
      <p>Together with other complementary apps for measuring various visual functions (eg, visual acuity and refractive error), Eyeturn Cloud may potentially help in enabling a more accessible and scalable solution in the management of strabismus. For patient subgroups such as children, a rapid, photographic-based method may improve cooperation compared to conventional examinations that require prolonged fixation [<xref ref-type="bibr" rid="ref30">30</xref>]. Furthermore, obtaining objective, quantitative measurements is critical for monitoring disease progression or treatment response over time, which is a cornerstone of pediatric strabismus management [<xref ref-type="bibr" rid="ref31">31</xref>]. Access to pediatric ophthalmology is uneven, with notable rural and underserved gaps. Contemporary screening recommendations endorse early detection but often lack scalable, quantitative tools deployable outside specialty clinics. By enabling photographic acquisition with cloud‑based, standardized quantification, our system can provide actionable, reproducible measurements for triage and follow‑up by nonspecialist personnel (eg, school nurses and primary care staff) and in community settings, potentially shortening time‑to‑referral and improving longitudinal care. Importantly, no specialized equipment or specialist presence is required. A simple photograph suffices for measurement and can be acquired in routine environments without dedicated clinical instrumentation.</p>
      <p>This study quantitatively evaluates the strabismus measurement approach using the cover test app for image capture and the AI-powered cloud computing paradigm for strabismus angle calculation. The primary objective was to evaluate the cloud-AI platform’s effectiveness in accurately measuring strabismus angles. We hypothesize that the Eyeturn Cloud platform can provide quantitative measurements of strabismus that are highly consistent with the clinical gold standard of cover testing with prism. The primary outcomes for this validation are the coefficient of determination (<italic>R</italic><sup>2</sup>) from linear regression and the 95% limits of agreement from Bland-Altman analysis. A secondary outcome is the test-retest reliability of the platform’s measurements.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Pipeline of Strabismus Calculation</title>
        <p>The entire image processing is executed in a cloud app instance running in Amazon Web Services (AWS). As illustrated in <xref rid="figure1" ref-type="fig">Figure 1</xref>, the app includes 4 key steps to calculate eye deviation. First, using the YOLOv5-face (You Only Look Once) model [<xref ref-type="bibr" rid="ref32">32</xref>], the eyes are detected, localized, and cropped. Next, using an hourglass network [<xref ref-type="bibr" rid="ref33">33</xref>] trained with a synthesized eye image set [<xref ref-type="bibr" rid="ref34">34</xref>], the iris is localized in cropped eye images. Using the center of the iris as a reference point, the Segment Anything Model (SAM) [<xref ref-type="bibr" rid="ref35">35</xref>] is used to segment the visible iris region. Finally, an ellipse is fitted to the segmented iris. The center of the ellipse is considered as the precise location of the iris center. Combining with corneal reflection points, which are detected by simple thresholding, eye deviation can be calculated according to the Hirschberg method [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref36">36</xref>]. We did not train YOLOv5-face and SAM models. Pretrained versions were used with default parameters for inference.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>Image processing pipeline of Eyeturn Cloud web app for eye deviation calculation. The processing includes 4 steps—eye localization, iris localization, iris segmentation, and iris boundary fitting with deviation calculation. SAM: Segment Anything Model; YOLO: You Only Look Once.</p>
          </caption>
          <graphic xlink:href="formative_v9i1e79280_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Iris Detection</title>
        <p>Iris is detected using a coarse-to-fine approach. After obtaining the coordinates of the eyes, the left and right eye regions are cropped from the input image and resized to 72×120 pixels. The subimages are processed by the hourglass network for localizing the iris and eyelids. The network is a stacked-hourglass network trained on a synthetic eye image set [<xref ref-type="bibr" rid="ref34">34</xref>]. Detected iris and eyelid landmarks are represented by polygons, which approximate the curves of those eye features. Based on our evaluation, the center of the iris polygon is not precise enough for strabismus calculation. Therefore, the iris center is then selected as the reference point for more precise segmentation using SAM [<xref ref-type="bibr" rid="ref35">35</xref>], which segments the visible iris region pixel-wise. SAM generates 3 segmentation results ranked by probability. The result with the highest probability is not necessarily the best segmentation. Sometimes it corresponds to the pupil rather than the iris. To address this, we calculate the number of pixels within the area delineated by the iris segmentation results and exclude the pixels that are outside the eyelid polygon. The number of remaining pixels in each segmentation and shape are verified for reasonability. The closest match to the expected iris is chosen as the final segmentation.</p>
        <p>Since the iris is partially occluded by eyelids in most cases, the center of the segmented iris is not the true iris center. Therefore, an ellipse is fitted to the boundary of the segmented iris. To achieve a better elliptical fit, we applied morphological opening to the segmented iris to smooth the boundary without significantly altering the area. Then, the segmentation edge of the iris was extracted, retaining only the curved lines on the left and right sides. Using the <italic>LsqEllipse</italic> package [<xref ref-type="bibr" rid="ref37">37</xref>,<xref ref-type="bibr" rid="ref38">38</xref>], an ellipse was fitted to these curved lines to represent the shape of the iris, and the center of the ellipse was regarded as the precise center of the iris.</p>
        <p>The AI models for ocular landmark detection or segmentation were trained on diverse datasets from different image sources. Therefore, it is anticipated that the strabismus measurement method is hardware agnostic, and no retraining is needed for different acquisition hardware.</p>
      </sec>
      <sec>
        <title>Cloud App: Eyeturn Cloud</title>
        <p>The front-end of Eyeturn Cloud was developed using Cascading Style Sheets and JavaScript and was initiated with the Python <italic>Flask</italic> package. The Eyeturn Cloud website prompts users to upload a photo, and a “Process image” button appears once the upload is successful. Upon clicking the button, the back-end is activated to perform strabismus calculations (<xref rid="figure2" ref-type="fig">Figure 2</xref>). The back-end, developed in Python, was responsible for calculating the strabismus result based on the photo sent from the front-end. Once the calculation is completed, the cropped and processed eye images, along with the strabismus result, are displayed. The user could then verify the accuracy of the strabismus result by reviewing the processed eye images, with the estimated iris ellipse and corneal light reflex points overlaid. The Eyeturn Cloud website is deployed on AWS. We selected a t2.2xlarge instance with 8 virtual central processing units to accelerate strabismus calculation and 32 GB RAM to accommodate the YOLOv5-face, hourglass network, and SAM models. This instance was configured to run on Ubuntu 22.04 as a virtual server to ensure that the Eyeturn Cloud platform works continuously.</p>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>A screenshot of the Eyeturn Cloud interface when the web app was accessed from a phone. The uploaded picture shows that the participant covered his right eye with his hand unilaterally and then quickly uncovered. The ellipse fitting of iris is shown as a blue oval. If the fitting is obviously wrong, users should reject the result.</p>
          </caption>
          <graphic xlink:href="formative_v9i1e79280_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Cover Test App</title>
        <p>A specialized app for capturing pictures during cover or uncover or AC tests was developed because the intermittent deviation needs to be computed at the precise moment, just after the cover is removed. Otherwise, there is a risk that the measured deviation via the app is different from the true deviation. The cover test app is based on the idea of detecting the frame when both eyes are simultaneously present [<xref ref-type="bibr" rid="ref29">29</xref>]. The detection of eyes is done using a custom-trained YOLOv5 deep learning model, capable of working in real time on contemporary iPhones. To develop the real-time eye detection model used in our data capture app (the cover test app), we created a custom dataset by collecting face images from public web sources and from our own captures. A total of 1013 eyes were manually annotated from these images. The dataset was split into training (810 eyes), validation (101 eyes), and testing (102 eyes) sets. We trained the model by fine-tuning the official YOLOv5 pretrained checkpoint on our custom dataset, using the source code with its default training parameters.</p>
        <p>The procedure to perform cover or uncover or AC tests with the cover test app is similar to the conventional way. The examiner places an occluder in front of one eye, while the other eye fixates on the flash. The occluder is then removed away from the face by the examiner at an appropriate time, revealing both eyes. Once the app detects the presence of both eyes, it saves that frame, which can then be uploaded to Eyeturn Cloud for strabismus angle calculation. Since the app works in real time, there is very little delay in capturing the just-uncovered eye (approximately 33-66 milliseconds). In case of strabismus, the just uncovered eye is likely to be deviated, and this can be analyzed from the corneal reflection in the captured frame. Thus, the app facilitates measurement of intermittent strabismus, but it should be noted that Eyeturn Cloud does not have to be paired with this cover test app. It can process pictures captured through other ways; for instance, a frame extracted from a video.</p>
      </sec>
      <sec>
        <title>Ethical Considerations</title>
        <p>The clinical evaluation study was conducted in accordance with the tenets of the Declaration of Helsinki. The study was approved by the institutional review board of the Eye and ENT Hospital of Fudan University (Shanghai, China; approval: 2024126). Written informed consent was obtained from all participants prior to enrollment, including details about the study’s purpose, procedures, potential risks and benefits, and the right to withdraw at any time without repercussions; for children, parental or guardian consent was obtained. Participant selection was not influenced by sex or age. Images were processed locally on a computer (localhost) rather than uploaded to external cloud services like AWS during the study, minimizing data exposure. No identifiable information was shared beyond the research team. Participants did not receive any compensation for their participation. See <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref> for a completed checklist related to adherence to accepted guidelines for transparency, reproducibility, and methodological rigor in AI diagnostic studies. Data are not shared as per the conditions placed by the institutional review board to protect patient privacy.</p>
      </sec>
      <sec>
        <title>Sample Size Calculation</title>
        <p>From our previous study of the Eyeturn smartphone app [<xref ref-type="bibr" rid="ref17">17</xref>], the SD of the difference between the app and clinical measurements was 4.1 prism diopters (PDs). Our preliminary analysis showed that our computation system can provide measurements with a reliability of ±3 PDs. Assuming twice the variability in this study and a type I error rate of 0.05 and 80% power, we arrive at a sample size of 64. Considering dropouts and capture errors, the calculated sample size was inflated further by 25% to arrive at a figure of 80 participants as the recruitment target. In total, 79 participants (mean age 11.9, SD 6.3; range 4-42 years) were enrolled from Eye and ENT Hospital of Fudan University (Shanghai, China).</p>
      </sec>
      <sec>
        <title>Participant Recruitment</title>
        <p>Patients with strabismus were recruited from the patient cohort treated by the author (RL) at their affiliated ophthalmology clinic. Inclusion criteria were a prior diagnosis of horizontal strabismus (constant or intermittent exotropia or esotropia) and no other visual impairments. Participants were eligible only if the best‑corrected visual acuity (BCVA) in the worse eye was 20/40 or better (Snellen; 0.30 logMAR), using habitual correction or trial‑frame refraction. Strabismic amblyopia was not an automatic exclusion if the BCVA threshold was met. In our cohort, only 3 participants had strabismic amblyopia, and they satisfied the BCVA better than or equal to 20/40 criterion. Therefore, they were included. As our current system is not sufficiently robust yet to process images of eyes with glasses, patients with refractive errors higher than 1 diopter were excluded, owing to the concern that they may not be able to fixate at the target properly without vision correction during the cover test. Cases of incomitant strabismus, such as paralytic strabismus, or those with a clinically significant vertical deviation (&#62;5 PD) were also excluded. In addition, 9 participants without manifest strabismus were also enrolled to serve as control participants.</p>
      </sec>
      <sec>
        <title>Study Procedures</title>
        <p>Clinicians specialized in strabismus measurement performed the prism alternate cover test for near fixation (about 40 cm). In addition, they also took pictures under 3 cover conditions, AC, left eye cover-uncover (LC), and right eye cover-uncover (RC), using the cover test smartphone app as described earlier [<xref ref-type="bibr" rid="ref29">29</xref>]. Those pictures were uploaded to the Eyeturn Cloud app, and the results were compared with prism alternate cover test measurements.</p>
        <p>The image resolution used for this study was 3024 by 4032. The images were captured with a consumer iPhone 11 and 13, with autofocus at a distance of approximately 40 cm from the face.</p>
        <p>We prioritized using the strabismus angle calculated from the AC images as the primary result in this study. If the AC image processing of a patient could not yield a result due to missing corneal reflection, SAM segmentation errors, or other issues, we selected the larger absolute value of the strabismus results from the LC or RC images as the final result.</p>
      </sec>
      <sec>
        <title>Outcome Measures</title>
        <p>The clinician’s measurement was used as the ground truth, and the cloud computing results were analyzed by linear regression. A Bland-Altman analysis was also conducted to compare the physician’s assessment and the Eyeturn Cloud results.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>Among the 79 enrolled participants (including 15 esotropia, 55 exotropia, and 9 orthotropia), strabismus was successfully calculated by the Eyeturn Cloud app for 71 participants. The processing failed for the remaining 8 (10.1%) participants due to failure to detect the corneal reflection for 1 participant and incorrect iris segmentation for 7 participants (<xref rid="figure3" ref-type="fig">Figure 3</xref>). The average age of the participants for whom processing failed was 11 (SD 5) years, which is similar to the age distribution of the entire sample (mean 11.9, SD 6.3 years). There were 4 male and 4 female participants, and an equal number of base in (43, 32, 77, and 40 PD) and base out (32, 77, 32, and 43 PD) deviations with processing failure. Since the study was conducted in China, the eye color was mostly uniform (deep brown). The failures did not seem to be related to demographic or clinical factors. This type of incorrect detection or segmentation was determined by visually examining the iris ellipse fitting overlaid on processed pictures.</p>
      <p>In this study, we consider strabismus calculation larger than 104 PD to be an indication of incorrect image processing, since the highest 2 powers of the prism set used by the clinicians were 45 and 40 PD. With the 2 prisms used at the same time (1 prism on each eye), the largest strabismus would be 104 PD according to the lookup table of Cestari and Hunter [<xref ref-type="bibr" rid="ref39">39</xref>]. None of the measurements in this study were above this threshold. The linear regression analysis and coefficient of determination are shown in <xref rid="figure4" ref-type="fig">Figure 4</xref>A, with a determination coefficient (<italic>R</italic><sup>2</sup>) of 0.95. The slope of the linear regression was close to 1 (0.91). When analyzed separately, the slopes of regression for base out and base in deviations were 0.908 and 0.904, respectively (since they were similar, the data were not separated). As the Bland-Altman plot showed in <xref rid="figure4" ref-type="fig">Figure 4</xref>B, the 95% limits of agreement were –20.2 to 14.6 PD.</p>
      <p>Furthermore, image assessment based on different cover methods was compared using repeated measures ANOVA, which revealed a significant difference (<italic>P</italic>=.007; <italic>F</italic><sub>2</sub>=5.55). The averages of the 3 cover methods were 38.3 (SD 23.7), 30.8 (SD 17.2), and 32.1 (SD 17.8) PD for AC, LC, and RC, respectively. Bilateral cover test (based on AC images) revealed slightly but statistically significantly larger strabismus magnitudes (by about 6 to 8 PD) than unilateral cover test (based on LC and RC images). It is known that bilateral cover can better break binocular fusion. In this analysis, 23 participants who had calculation results for all the AC, LC, and RC images were included (<xref rid="figure5" ref-type="fig">Figure 5</xref>). The other participants had at least 1 calculation missing. When analyzed separately, the regression line slopes for AC, LC, and RC were 0.9, 0.92, and 0.97, respectively.</p>
      <p>To evaluate the repeatability of Eyeturn Cloud, another set of 15 participants (different from the ones reported earlier), with a mean age of 13.4 (SD 11.5) years (male: n=9 and female: n=6) and including 3 participants with exotropia, 3 with esotropia, and 9 with minimal deviation (&#60;10 PD), were repeatedly photographed 4 times without covering. They were instructed to stare at the flash binocularly when pictures were taken. The variability across trials was calculated for each participant by subtracting the individual average. The SD of the variability was 1.53 PD. Therefore, the 95% CI (2σ) of the measurement was ±2.99 PD (<xref rid="figure6" ref-type="fig">Figure 6</xref>). This random noise level can be used as an indicator of the discrimination threshold of the system [<xref ref-type="bibr" rid="ref40">40</xref>]. In addition, based on the image resolution, the minimum magnitude of phoria detectable (corresponding to 1 pixel for the image resolution of 3024×4032 used in the study) is about 2 PD, which is in a similar range as the measurement variability. Therefore, we rate the smallest amplitude of phoria detectable by the system to be ≈3 PD.</p>
      <fig id="figure3" position="float">
        <label>Figure 3</label>
        <caption>
          <p>Correct and incorrect examples of Eyeturn cloud processing. Upper panel: coarse iris and eyelid detection, middle panel: fine segmentation of iris, and lower panel: ellipse fitting of iris boundary. (A) A successful processing, (B) corneal reflection could not be detected because of many other reflections in the background, and (C) the segmentation and ellipse fitting of right eye iris are obviously incorrect; therefore, the calculation was rejected.</p>
        </caption>
        <graphic xlink:href="formative_v9i1e79280_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <fig id="figure4" position="float">
        <label>Figure 4</label>
        <caption>
          <p>Correlation and agreement between photographic and clinical strabismus measurements. (A) Linear regression analysis comparing strabismus calculation results and physician measurements. (B) Bland-Altman plot for agreement between the strabismus detection system and the physician measurements. In both panels, data points derived from AC images (n=45) are shown as hollow circles, while points derived from unilateral cover (LC or RC) images (n=26) are shown as hollow triangles. AC: alternating cover; LC: left eye cover-uncover; PD: prism diopter; RC: right eye cover-uncover.</p>
        </caption>
        <graphic xlink:href="formative_v9i1e79280_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <fig id="figure5" position="float">
        <label>Figure 5</label>
        <caption>
          <p>Each line in this graph represents the 3 strabismus calculation results based on AC, LC, and RC images, respectively. Bilateral covering (AC images) revealed larger strabismus than unilateral covering (LC and RC images). AC: alternating cover; LC: left eye cover-uncover; PD: prism diopter; RC: right eye cover-uncover.</p>
        </caption>
        <graphic xlink:href="formative_v9i1e79280_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <fig id="figure6" position="float">
        <label>Figure 6</label>
        <caption>
          <p>Repeatability of photographic assessment. Each participant (n=15) was photographed 4 times, and the residual errors after subtracting the individual’s average were plotted here. Two dashed lines indicate ±2σ, which represent the 95% CI, ±2.99 PD. PD: prism diopter.</p>
        </caption>
        <graphic xlink:href="formative_v9i1e79280_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>In this study, we developed and evaluated a web-based platform, Eyeturn Cloud, for strabismus angle calculation. We evaluated the level of agreement between the strabismus angles computed by Eyeturn Cloud based on uploaded pictures of patients’ eyes and standard clinical measurements using prisms. The evaluation showed that the strabismus angles computed by the image-based cloud app had a strong linear correlation with clinical measurements (<italic>R</italic><sup>2</sup>=0.95; slope=0.91; <xref rid="figure4" ref-type="fig">Figure 4</xref>). The measures were obtained from different types of strabismus and for different types of cover conditions.</p>
      </sec>
      <sec>
        <title>Comparison to Prior Work</title>
        <p>While the limits of agreement (<xref rid="figure4" ref-type="fig">Figure 4</xref>B) may not seem to be small, 2 issues should be noted. First, the repeatability of prism measurement by clinicians may not be very high, depending on deviation amplitude, examiner skill, and patient factors. The Bland-Altman limits of agreement (–20.2 to +14.6 PD) for the cloud app measurements are slightly worse than the interexaminer agreement limits (13 PD reported by de Jongh et al [<xref ref-type="bibr" rid="ref41">41</xref>], or ±10 PD reported by Hatt et al [<xref ref-type="bibr" rid="ref42">42</xref>], or ±11.7 PD reported by the Pediatric Eye Disease Investigator Group [<xref ref-type="bibr" rid="ref43">43</xref>]) for large strabismus angles at near fixation. However, the limits of agreement for the Eyeturn Cloud app with prism testing compare favorably with other photographic strabismus assessment methods. For example, Strabocheck [<xref ref-type="bibr" rid="ref27">27</xref>] evaluation reported 95% limits of agreement between –30.0 and 31.0 PD, and Garcia et al [<xref ref-type="bibr" rid="ref23">23</xref>] reported agreement limits between –15.78 and 24.53 PD.</p>
        <p>Second, the moderate error for small phoria of some control participants was due to the incorrect determination of esophoria versus exophoria, which leads to the incorrect sign of results. For instance, an exophoria of 3 PD was interpreted as an esophoria of –7 PD. Thus, the disagreement became 10 PD. While it is still within the limits of agreement as mentioned earlier, the incorrect sign can be an error of clinical significance. There were 3 participants with orthophoria (3 PD base out, 6 PD base out, and 3 PD base in) misclassified in data analysis. One of the causes of this problem is that the visual axis is not precisely aligned with the pupil center because of the angle κ. Even when an eye is fixating on the flash, the corneal reflection is off the center nasally. In this study, we applied an offset of 4° angle κ in both eyes of participants with small deviation (&#60;10 PD). As the angle κ of each individual eye was unknown, and because the current Eyeturn Cloud user interface does not have an option to indicate whether the uploaded image was taken with cover or not, or which eye was last uncovered if it was a cover test image, the deviated eye could not be identified simply by the larger offset. However, for relatively large deviations, base out and base in can be reliably determined based on the corneal reflection offset in the deviated eye. The current Eyeturn Cloud does not indicate base out or base in for users when the calculated deviation is smaller than 10 PD, in order to prevent misclassification in actual application. In this paper, the 3 participants with orthophoria were still counted as misclassifications just for the sake of data analysis. How to determine small esophoria and exophoria correctly is our future work. One possible approach can be through analyzing eye movement during the cover test based on videos. 
An alternative approach would be implementing a protocol where both eyes are captured monocularly (under unilateral cover conditions) as well as binocular condition (no cover). From the joint analysis of the 3 images, small-angle esotropia and exotropia could be classified with greater accuracy.</p>
        <p>The proposed Eyeturn Cloud solution is similar to the Eyeturn smartphone app [<xref ref-type="bibr" rid="ref19">19</xref>] in terms of the underlying approach of strabismus computation, although the landmarks within the eyes are now detected using AI algorithms. Compared to the 2WIN handheld photoscreening device that can provide quantitative strabismus measures, our approach is different in terms of imaging (visible vs infrared) and processing (iris vs pupil segmentation). Evaluation of the 2WIN portable device in 137 children with manifest strabismus and with dilated pupils showed <italic>R</italic><sup>2</sup> values of 0.58 and 0.24 for esotropia and exotropia, respectively, when compared to alternate cover prism testing [<xref ref-type="bibr" rid="ref21">21</xref>]. The maximum deviation included in that study was &#60;40 PD. In addition, there are some differences between Eyeturn Cloud and previous web-based approaches that compute the strabismus angle from uploaded pictures. Compared to the Strabocheck website [<xref ref-type="bibr" rid="ref27">27</xref>], which requires 3 pictures (left eye, right eye, and both eyes) and manually marking eye features, our approach can automatically assess strabismus based on a single picture, which can be captured under no cover, unilateral cover for 1 eye, or alternate cover conditions. Compared to the AI-based platform developed by Wu et al [<xref ref-type="bibr" rid="ref28">28</xref>] for strabismus screening that detects the presence of strabismus from ocular pictures, Eyeturn Cloud provides a quantitative measurement of the eye deviation.</p>
      </sec>
      <sec>
        <title>Strength</title>
        <p>The Eyeturn Cloud platform enables the use of general digital devices, like smartphones and cameras, to capture and upload eye images for analysis by a web app in the cloud. This approach not only reduces the need for expensive, specialized equipment but also expands access to screening for a broader population. The platform’s reliance on cloud infrastructure ensures that complex computations are handled remotely, eliminating the processing burden on local devices and allowing for batch processing of large datasets. This capability is particularly advantageous in large-scale screening programs, such as those conducted in schools, communities, or rural areas, where high volumes of data must be processed efficiently. The ability to upload images offline and process them later when internet access is available is particularly crucial for rural sites.</p>
        <p>While in this study, the pictures during unilateral cover or alternate cover testing were captured by a special-purpose cover test smartphone app, it should be noted that this app is not strictly necessary for capturing the appropriate pictures. Users could use any modern native camera apps (eg, live photo feature of iPhone camera app) or record a high-resolution (4K) video, and then, select the required frame when the eye is first visible after removal of the cover (see demonstration in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). In the future, the Eyeturn Cloud app could be updated to accept high-resolution videos and automatically select the appropriate frame based on the visibility of both eyes to compute strabismus angles. Furthermore, for constant strabismus assessment, cover is not needed. Thus, pictures taken with regular cameras can be processed by Eyeturn Cloud to yield valid results.</p>
        <p>One potential advantage of cloud AI platforms is that they can facilitate the maintenance of digital archives of patient data, allowing ophthalmologists to monitor the progression of strabismus over time and adjust their treatment plans accordingly. Such a data-driven approach has the potential to enhance diagnostic accuracy and promote more personalized care, ultimately improving patient outcomes through timely interventions and continuous analysis. In addition, these AI platforms may support telemedicine-based screening and follow-up care [<xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref15">15</xref>], which is especially beneficial in areas lacking access to specialized ophthalmic equipment. This reduces the need for frequent patient visits and facilitates consistent remote monitoring [<xref ref-type="bibr" rid="ref44">44</xref>]. Regarding screening, the Eyeturn smartphone app was previously used for strabismus screening in school children in a pilot study [<xref ref-type="bibr" rid="ref45">45</xref>]. As the underlying computation is conceptually similar, it is reasonable to expect that Eyeturn Cloud could also be potentially useful in such screening settings. However, the utility of Eyeturn Cloud in screening and telehealth settings is yet to be determined and is future work.</p>
      </sec>
      <sec>
        <title>Limitations</title>
        <p>There are still some unresolved issues with the current app. The cloud app was able to generate valid strabismus results for 71 patients, while the remaining 8 patients could not yield results. Among these, the results were invalid due to the absence of corneal light reflex points or incorrect iris segmentation by SAM. The issue of missing corneal light reflex points could potentially be resolved by selecting an appropriate environment for photography or by attempting multiple captures. On the other hand, eyes with glasses were excluded in this study, as the flash could interfere with the glasses causing spurious reflections, if caution was not taken during picture capturing. This could be solved in the future by more intelligent algorithms than simple thresholding methods.</p>
        <p>In theory, our method could reliably measure ocular misalignment above 3 PD, but determining the directionality of misalignment is a challenge for photoscreening based on a single image. To mitigate sign instability near 0, the app withholds directionality when the estimated magnitude is &#60;10 PD. Despite lack of directionality, strabismus magnitude measurement can still be useful information for vision screening.</p>
        <p>There are also some limitations of this study. All of the pictures included in this study were taken with our cover test app by clinicians in controlled settings rather than by lay people. This environment likely ensured optimal lighting, correct camera distance, and maximal patient cooperation. The usability, performance, and reliability of the Eyeturn Cloud platform when used by laypersons, such as parents at home for a telehealth consultation or by school nurses in a community screening program, remain unknown. In addition, pictures captured using other acquisition methods, such as native camera apps on other smartphones, have not been evaluated. Another limitation was that the number of base in deviations was higher than base out in our participants, due to the biased prevalence in the population [<xref ref-type="bibr" rid="ref46">46</xref>], and future studies need to consider balancing the sample for more thorough analysis of the app’s performance for base out deviations. The study only evaluated images captured along primary gaze direction, and differentiating incomitant and concomitant strabismus and measuring the deviation patterns are future work. However, the findings of this study could guide future studies, where its utility for telehealth and screening applications could be fully evaluated.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>The main novelty lies in the approach where we developed the cloud-AI app for strabismus angle computation based on a single picture, which can be acquired from different sources. While AI tools exist for strabismus screening from a single image [<xref ref-type="bibr" rid="ref28">28</xref>], our app focuses on quantitative measurement of angular deviation. Our proposed solution was intended to address scenarios where a cover test is needed as well as to offer a possibility to assess photos captured with regular cameras without covering. Cover-testing mode is important for measuring patients with nonmanifest strabismus. The cloud-AI platform treats all image sources in the same way. Overall, the Eyeturn Cloud offers a convenient and accessible way of strabismus assessment. This study showed that quantitative assessment of strabismus with Eyeturn Cloud was repeatable and demonstrated a strong correlation with clinical measurements, although further work is required to improve its precision and address misclassifications of small-angle deviations. While the platform’s architecture suggests potential for future application in vision screening and telehealth, its efficacy and reliability in these real-world settings, particularly when used by nonexperts with varied hardware, require rigorous future investigation before any such use can be recommended.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>Demonstration video.</p>
        <media xlink:href="formative_v9i1e79280_app1.mp4" xlink:title="MP4 File  (MP4 Video), 9431 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>MAIC-10 brief quality checklist.</p>
        <media xlink:href="formative_v9i1e79280_app2.pdf" xlink:title="PDF File  (Adobe PDF File), 12 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AC</term>
          <def>
            <p>alternating cover</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AI</term>
          <def>
            <p>artificial intelligence</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">AWS</term>
          <def>
            <p>Amazon Web Services</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">BCVA</term>
          <def>
            <p>best‑corrected visual acuity</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">LC</term>
          <def>
            <p>left eye cover-uncover</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">PD</term>
          <def>
            <p>prism diopter</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">RC</term>
          <def>
            <p>right eye cover-uncover</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb8">SAM</term>
          <def>
            <p>Segment Anything Model</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb9">YOLO</term>
          <def>
            <p>You Only Look Once</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This research is supported in part by the China Medical Board (grant #24-558). Generative artificial intelligence was not used for manuscript generation.</p>
    </ack>
    <fn-group>
      <fn fn-type="conflict">
        <p>SP and GL hold a patent related to strabismus measurement, which has been assigned to Mass Eye &#38; Ear. Additionally, GL and SP serve as cofounders of EyeNexo, a startup company that develops smartphone apps for vision tests. However, EyeNexo is not involved in this study.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Robaei</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Rose</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Kifley</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Cosstick</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Ip</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Mitchell</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Factors associated with childhood strabismus: findings from a population-based study</article-title>
          <source>Ophthalmology</source>
          <year>2006</year>
          <volume>113</volume>
          <issue>7</issue>
          <fpage>1146</fpage>
          <lpage>1153</lpage>
          <pub-id pub-id-type="doi">10.1016/j.ophtha.2006.02.019</pub-id>
          <pub-id pub-id-type="medline">16675019</pub-id>
          <pub-id pub-id-type="pii">S0161-6420(06)00277-6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Freeman</surname>
              <given-names>AW</given-names>
            </name>
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>VA</given-names>
            </name>
            <name name-style="western">
              <surname>Jolly</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Components of visual acuity loss in strabismus</article-title>
          <source>Vision Res</source>
          <year>1996</year>
          <volume>36</volume>
          <issue>5</issue>
          <fpage>765</fpage>
          <lpage>774</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/0042-6989(95)00171-9"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/0042-6989(95)00171-9</pub-id>
          <pub-id pub-id-type="medline">8762305</pub-id>
          <pub-id pub-id-type="pii">0042-6989(95)00171-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rowe</surname>
              <given-names>F</given-names>
            </name>
            <collab>VIS group UK</collab>
          </person-group>
          <article-title>The profile of strabismus in stroke survivors</article-title>
          <source>Eye (Lond)</source>
          <year>2010</year>
          <volume>24</volume>
          <issue>4</issue>
          <fpage>682</fpage>
          <lpage>685</lpage>
          <pub-id pub-id-type="doi">10.1038/eye.2009.138</pub-id>
          <pub-id pub-id-type="medline">19521433</pub-id>
          <pub-id pub-id-type="pii">eye2009138</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hatt</surname>
              <given-names>SR</given-names>
            </name>
            <name name-style="western">
              <surname>Leske</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Kirgis</surname>
              <given-names>PA</given-names>
            </name>
            <name name-style="western">
              <surname>Bradley</surname>
              <given-names>EA</given-names>
            </name>
            <name name-style="western">
              <surname>Holmes</surname>
              <given-names>JM</given-names>
            </name>
          </person-group>
          <article-title>The effects of strabismus on quality of life in adults</article-title>
          <source>Am J Ophthalmol</source>
          <year>2007</year>
          <volume>144</volume>
          <issue>5</issue>
          <fpage>643</fpage>
          <lpage>647</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/17707329"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ajo.2007.06.032</pub-id>
          <pub-id pub-id-type="medline">17707329</pub-id>
          <pub-id pub-id-type="pii">S0002-9394(07)00610-1</pub-id>
          <pub-id pub-id-type="pmcid">PMC2241762</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zou</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Tian</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Wygnanski-Jaffe</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Yehezkel</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Moshkovitz</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Effectiveness and repeatability of eye-tracking-based test in strabismus measurement of children</article-title>
          <source>Semin Ophthalmol</source>
          <year>2022</year>
          <volume>37</volume>
          <issue>4</issue>
          <fpage>502</fpage>
          <lpage>508</lpage>
          <pub-id pub-id-type="doi">10.1080/08820538.2021.2002918</pub-id>
          <pub-id pub-id-type="medline">34814794</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Economides</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Dilbeck</surname>
              <given-names>MD</given-names>
            </name>
            <name name-style="western">
              <surname>Gentry</surname>
              <given-names>TN</given-names>
            </name>
            <name name-style="western">
              <surname>Horton</surname>
              <given-names>JC</given-names>
            </name>
          </person-group>
          <article-title>Ambulatory monitoring with eye tracking glasses to assess the severity of intermittent exotropia</article-title>
          <source>Am J Ophthalmol</source>
          <year>2023</year>
          <volume>250</volume>
          <fpage>120</fpage>
          <lpage>129</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/36681174"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ajo.2023.01.014</pub-id>
          <pub-id pub-id-type="medline">36681174</pub-id>
          <pub-id pub-id-type="pii">S0002-9394(23)00027-2</pub-id>
          <pub-id pub-id-type="pmcid">PMC10266486</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Gao</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>A strabismus widespread screening method based on wearable eye tracker</article-title>
          <year>2024</year>
          <conf-name>IEEE BioSensors Conference (BioSensors)</conf-name>
          <conf-date>July 28-30, 2024</conf-date>
          <conf-loc>Cambridge, United Kingdom</conf-loc>
          <pub-id pub-id-type="doi">10.1109/biosensors61405.2024.10712701</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mori</surname>
              <given-names>DM</given-names>
            </name>
            <name name-style="western">
              <surname>Kuchhangi</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Tame</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Cooper</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Hajkazemshirazi</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Indaram</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Keenan</surname>
              <given-names>JD</given-names>
            </name>
            <name name-style="western">
              <surname>Oatts</surname>
              <given-names>JT</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of a novel virtual reality simulated alternate cover test to assess strabismus: a prospective, masked study</article-title>
          <source>Am J Ophthalmol</source>
          <year>2025</year>
          <volume>269</volume>
          <fpage>266</fpage>
          <lpage>272</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0002-9394(24)00418-5"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ajo.2024.08.042</pub-id>
          <pub-id pub-id-type="medline">39245131</pub-id>
          <pub-id pub-id-type="pii">S0002-9394(24)00418-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC12230876</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>L-C</given-names>
            </name>
            <name name-style="western">
              <surname>Feng</surname>
              <given-names>KM</given-names>
            </name>
            <name name-style="western">
              <surname>Chuang</surname>
              <given-names>P-C</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>Y-H</given-names>
            </name>
            <name name-style="western">
              <surname>Chien</surname>
              <given-names>K-H</given-names>
            </name>
          </person-group>
          <article-title>Preliminary data on a novel smart glasses system for measuring the angle of deviation in strabismus</article-title>
          <source>Eye (Lond)</source>
          <year>2023</year>
          <volume>37</volume>
          <issue>13</issue>
          <fpage>2700</fpage>
          <lpage>2706</lpage>
          <pub-id pub-id-type="doi">10.1038/s41433-023-02402-5</pub-id>
          <pub-id pub-id-type="medline">36702909</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41433-023-02402-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC10483031</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sopeyin</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Young</surname>
              <given-names>BK</given-names>
            </name>
            <name name-style="western">
              <surname>Howard</surname>
              <given-names>MA</given-names>
            </name>
          </person-group>
          <article-title>2020 Evaluation of portable vision screening instruments</article-title>
          <source>Yale J Biol Med</source>
          <year>2021</year>
          <volume>94</volume>
          <issue>1</issue>
          <fpage>107</fpage>
          <lpage>114</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/33795987"/>
          </comment>
          <pub-id pub-id-type="medline">33795987</pub-id>
          <pub-id pub-id-type="pii">yjbm941107</pub-id>
          <pub-id pub-id-type="pmcid">PMC7995935</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="web">
          <article-title>GazeLab</article-title>
          <source>BCNINNOVA</source>
          <access-date>2025-10-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.bcninnova.com/index.php?action=Gazelab">https://www.bcninnova.com/index.php?action=Gazelab</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="web">
          <article-title>neos</article-title>
          <source>machineMD</source>
          <access-date>2025-10-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.machinemd.com/neos">https://www.machinemd.com/neos</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jost</surname>
              <given-names>RM</given-names>
            </name>
            <name name-style="western">
              <surname>Yanni</surname>
              <given-names>SE</given-names>
            </name>
            <name name-style="western">
              <surname>Beauchamp</surname>
              <given-names>CL</given-names>
            </name>
            <name name-style="western">
              <surname>Stager</surname>
              <given-names>DR</given-names>
            </name>
            <name name-style="western">
              <surname>Stager</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Dao</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Birch</surname>
              <given-names>EE</given-names>
            </name>
          </person-group>
          <article-title>Beyond screening for risk factors: objective detection of strabismus and amblyopia</article-title>
          <source>JAMA Ophthalmol</source>
          <year>2014</year>
          <volume>132</volume>
          <issue>7</issue>
          <fpage>814</fpage>
          <lpage>820</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/24875453"/>
          </comment>
          <pub-id pub-id-type="doi">10.1001/jamaophthalmol.2014.424</pub-id>
          <pub-id pub-id-type="medline">24875453</pub-id>
          <pub-id pub-id-type="pii">1874718</pub-id>
          <pub-id pub-id-type="pmcid">PMC4334567</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bowe</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Hunter</surname>
              <given-names>DG</given-names>
            </name>
            <name name-style="western">
              <surname>Mantagos</surname>
              <given-names>IS</given-names>
            </name>
            <name name-style="western">
              <surname>Kazlas</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Jastrzembski</surname>
              <given-names>BG</given-names>
            </name>
            <name name-style="western">
              <surname>Gaier</surname>
              <given-names>ED</given-names>
            </name>
            <name name-style="western">
              <surname>Massey</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Franz</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Schumann</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Brown</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Meyers</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Shah</surname>
              <given-names>AS</given-names>
            </name>
          </person-group>
          <article-title>Virtual visits in ophthalmology: timely advice for implementation during the COVID-19 public health crisis</article-title>
          <source>Telemed J E Health</source>
          <year>2020</year>
          <volume>26</volume>
          <issue>9</issue>
          <fpage>1113</fpage>
          <lpage>1117</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/32408801"/>
          </comment>
          <pub-id pub-id-type="doi">10.1089/tmj.2020.0121</pub-id>
          <pub-id pub-id-type="medline">32408801</pub-id>
          <pub-id pub-id-type="pmcid">PMC7640749</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Keilty</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Houston</surname>
              <given-names>KE</given-names>
            </name>
            <name name-style="western">
              <surname>Collins</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Trehan</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Merabet</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Watts</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Pundlik</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Inpatient virtual vision clinic improves access to vision rehabilitation before and during the COVID-19 pandemic</article-title>
          <source>Arch Rehabil Res Clin Transl</source>
          <year>2021</year>
          <volume>3</volume>
          <issue>1</issue>
          <fpage>100100</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2590-1095(20)30093-8"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.arrct.2020.100100</pub-id>
          <pub-id pub-id-type="medline">33363279</pub-id>
          <pub-id pub-id-type="pii">S2590-1095(20)30093-8</pub-id>
          <pub-id pub-id-type="pmcid">PMC7749728</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hasebe</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ohtsuki</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Tadokoro</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Okano</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Furuse</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>The reliability of a video-enhanced Hirschberg test under clinical conditions</article-title>
          <source>Invest Ophthalmol Vis Sci</source>
          <year>1995</year>
          <volume>36</volume>
          <issue>13</issue>
          <fpage>2678</fpage>
          <lpage>2685</lpage>
          <pub-id pub-id-type="medline">7499090</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>de Almeida</surname>
              <given-names>JDS</given-names>
            </name>
            <name name-style="western">
              <surname>Silva</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>de Paiva</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Teixeira</surname>
              <given-names>JAM</given-names>
            </name>
          </person-group>
          <article-title>Computational methodology for automatic detection of strabismus in digital images through Hirschberg test</article-title>
          <source>Comput Biol Med</source>
          <year>2012</year>
          <volume>42</volume>
          <issue>1</issue>
          <fpage>135</fpage>
          <lpage>146</lpage>
          <pub-id pub-id-type="doi">10.1016/j.compbiomed.2011.11.001</pub-id>
          <pub-id pub-id-type="medline">22119221</pub-id>
          <pub-id pub-id-type="pii">S0010-4825(11)00214-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Barry</surname>
              <given-names>JC</given-names>
            </name>
            <name name-style="western">
              <surname>Backes</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Limbus versus pupil center for ocular alignment measurement with corneal reflexes</article-title>
          <source>Invest Ophthalmol Vis Sci</source>
          <year>1997</year>
          <volume>38</volume>
          <issue>12</issue>
          <fpage>2597</fpage>
          <lpage>2607</lpage>
          <pub-id pub-id-type="medline">9375579</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pundlik</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Tomasi</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Houston</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Development and preliminary evaluation of a smartphone app for measuring eye alignment</article-title>
          <source>Transl Vis Sci Technol</source>
          <year>2019</year>
          <volume>8</volume>
          <issue>1</issue>
          <fpage>19</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30766761"/>
          </comment>
          <pub-id pub-id-type="doi">10.1167/tvst.8.1.19</pub-id>
          <pub-id pub-id-type="medline">30766761</pub-id>
          <pub-id pub-id-type="pii">TVST-18-0930</pub-id>
          <pub-id pub-id-type="pmcid">PMC6369861</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Pundlik</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Tomasi</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Houston</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Using an automated Hirschberg test app to evaluate ocular alignment</article-title>
          <source>J Vis Exp</source>
          <year>2020</year>
          <issue>157</issue>
          <fpage>e60908</fpage>
          <pub-id pub-id-type="doi">10.3791/60908</pub-id>
          <pub-id pub-id-type="medline">32281979</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Racano</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Di Stefano</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Alessi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Pertile</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Romanelli</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Validation of the 2WIN Corneal Reflexes App in children</article-title>
          <source>Graefes Arch Clin Exp Ophthalmol</source>
          <year>2021</year>
          <volume>259</volume>
          <issue>6</issue>
          <fpage>1635</fpage>
          <lpage>1642</lpage>
          <pub-id pub-id-type="doi">10.1007/s00417-020-05066-z</pub-id>
          <pub-id pub-id-type="medline">33404678</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00417-020-05066-z</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tenório Albuquerque Madruga Mesquita</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Azevedo Valente</surname>
              <given-names>TL</given-names>
            </name>
            <name name-style="western">
              <surname>de Almeida</surname>
              <given-names>JDS</given-names>
            </name>
            <name name-style="western">
              <surname>Meireles Teixeira</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Cord Medina</surname>
              <given-names>FM</given-names>
            </name>
            <name name-style="western">
              <surname>Dos Santos</surname>
              <given-names>AM</given-names>
            </name>
          </person-group>
          <article-title>A mhealth application for automated detection and diagnosis of strabismus</article-title>
          <source>Int J Med Inform</source>
          <year>2021</year>
          <volume>153</volume>
          <fpage>104527</fpage>
          <pub-id pub-id-type="doi">10.1016/j.ijmedinf.2021.104527</pub-id>
          <pub-id pub-id-type="medline">34186433</pub-id>
          <pub-id pub-id-type="pii">S1386-5056(21)00153-2</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Garcia</surname>
              <given-names>SSS</given-names>
            </name>
            <name name-style="western">
              <surname>Santiago</surname>
              <given-names>APD</given-names>
            </name>
            <name name-style="western">
              <surname>Directo</surname>
              <given-names>PMC</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of a Hirschberg test-based application for measuring ocular alignment and detecting strabismus</article-title>
          <source>Curr Eye Res</source>
          <year>2021</year>
          <volume>46</volume>
          <issue>11</issue>
          <fpage>1768</fpage>
          <lpage>1776</lpage>
          <pub-id pub-id-type="doi">10.1080/02713683.2021.1916038</pub-id>
          <pub-id pub-id-type="medline">33856941</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Raghuram</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Chinn</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Smartphone application: repeatability and validity in estimating manifest strabismus on children and adolescents</article-title>
          <source>Invest Ophthalmol Vis Sci</source>
          <year>2020</year>
          <volume>61</volume>
          <issue>7</issue>
          <fpage>2132</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://iovs.arvojournals.org/article.aspx?articleid=2767366"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vicini</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Brügger</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Grabe</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Abegg</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Automated measurement of strabismus angle using a commercial virtual reality headset</article-title>
          <source>Klin Monbl Augenheilkd</source>
          <year>2025</year>
          <volume>242</volume>
          <issue>4</issue>
          <fpage>485</fpage>
          <lpage>488</lpage>
          <pub-id pub-id-type="doi">10.1055/a-2466-0284</pub-id>
          <pub-id pub-id-type="medline">39694045</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Weber</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Rappoport</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Dysli</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Schmückle Meier</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Marks</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bockisch</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Landau</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>MacDougall</surname>
              <given-names>HG</given-names>
            </name>
          </person-group>
          <article-title>Strabismus measurements with novel video goggles</article-title>
          <source>Ophthalmology</source>
          <year>2017</year>
          <volume>124</volume>
          <issue>12</issue>
          <fpage>1849</fpage>
          <lpage>1856</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0161-6420(17)31143-0"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ophtha.2017.06.020</pub-id>
          <pub-id pub-id-type="medline">28728924</pub-id>
          <pub-id pub-id-type="pii">S0161-6420(17)31143-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Azri</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Tomietto</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Marciano</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Bui-Quoc</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Evaluation of Strabocheck® as an objective measurement method of comitant horizontal strabismus in children undergoing surgery</article-title>
          <source>Strabismus</source>
          <year>2023</year>
          <volume>31</volume>
          <issue>2</issue>
          <fpage>73</fpage>
          <lpage>81</lpage>
          <pub-id pub-id-type="doi">10.1080/09273972.2023.2204886</pub-id>
          <pub-id pub-id-type="medline">37199169</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Mao</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Feng</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Zou</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Nie</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Yin</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Shang</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>An artificial intelligence platform for the screening and managing of strabismus</article-title>
          <source>Eye (Lond)</source>
          <year>2024</year>
          <volume>38</volume>
          <issue>16</issue>
          <fpage>3101</fpage>
          <lpage>3107</lpage>
          <pub-id pub-id-type="doi">10.1038/s41433-024-03228-5</pub-id>
          <pub-id pub-id-type="medline">39068250</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41433-024-03228-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC11543679</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>He</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bhambhani</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Bachhav</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Pundlik</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Measurement of strabismus using a mobile cover test app</article-title>
          <source>Invest Ophthalmol Vis Sci</source>
          <year>2025</year>
          <volume>66</volume>
          <fpage>1135</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://iovs.arvojournals.org/article.aspx?articleid=2803698"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vijendran</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Alok</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kuzhuppilly</surname>
              <given-names>NIR</given-names>
            </name>
            <name name-style="western">
              <surname>Bhat</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Kamath</surname>
              <given-names>YS</given-names>
            </name>
          </person-group>
          <article-title>Effectiveness of smartphone technology for detection of paediatric ocular diseases—a systematic review</article-title>
          <source>BMC Ophthalmol</source>
          <year>2025</year>
          <volume>25</volume>
          <issue>1</issue>
          <fpage>323</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmcophthalmol.biomedcentral.com/articles/10.1186/s12886-025-04160-2"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12886-025-04160-2</pub-id>
          <pub-id pub-id-type="medline">40448047</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12886-025-04160-2</pub-id>
          <pub-id pub-id-type="pmcid">PMC12123870</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wallace</surname>
              <given-names>DK</given-names>
            </name>
            <name name-style="western">
              <surname>Christiansen</surname>
              <given-names>SP</given-names>
            </name>
            <name name-style="western">
              <surname>Sprunger</surname>
              <given-names>DT</given-names>
            </name>
            <name name-style="western">
              <surname>Melia</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>KA</given-names>
            </name>
            <name name-style="western">
              <surname>Morse</surname>
              <given-names>CL</given-names>
            </name>
            <name name-style="western">
              <surname>Repka</surname>
              <given-names>MX</given-names>
            </name>
          </person-group>
          <article-title>Esotropia and Exotropia Preferred Practice Pattern®</article-title>
          <source>Ophthalmology</source>
          <year>2018</year>
          <volume>125</volume>
          <issue>1</issue>
          <fpage>P143</fpage>
          <lpage>P183</lpage>
          <pub-id pub-id-type="doi">10.1016/j.ophtha.2017.10.007</pub-id>
          <pub-id pub-id-type="medline">29108746</pub-id>
          <pub-id pub-id-type="pii">S0161-6420(17)33034-8</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Qi</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Tan</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Yao</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>YOLO5Face: why reinventing a face detector</article-title>
          <year>2022</year>
          <conf-name>European Conference on Computer Vision</conf-name>
          <conf-date>October 23-27, 2022</conf-date>
          <conf-loc>Tel Aviv, Israel</conf-loc>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/chapter/10.1007/978-3-031-25072-9_15"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Newell</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Deng</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Stacked hourglass networks for human pose estimation</article-title>
          <year>2016</year>
          <conf-name>Computer Vision—ECCV</conf-name>
          <conf-date>October 11-14, 2016</conf-date>
          <conf-loc>Amsterdam, The Netherlands</conf-loc>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://link.springer.com/chapter/10.1007/978-3-319-46484-8_29"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Bulling</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Hilliges</surname>
              <given-names>O</given-names>
            </name>
          </person-group>
          <article-title>Learning to find eye region landmarks for remote gaze estimation in unconstrained settings</article-title>
          <year>2018</year>
          <conf-name>ETRA '18: Proceedings of the 2018 ACM Symposium on Eye Tracking Research &#38; Applications</conf-name>
          <conf-date>June 14, 2018</conf-date>
          <conf-loc>Warsaw, Poland</conf-loc>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dl.acm.org/doi/10.1145/3204493.3204545"/>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kirillov</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Mintun</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Ravi</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Mao</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Rolland</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Gustafson</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Segment anything</article-title>
          <year>2023</year>
          <conf-name>IEEE/CVF International Conference on Computer Vision (ICCV)</conf-name>
          <conf-date>October 1-6, 2023</conf-date>
          <conf-loc>Paris, France</conf-loc>
          <pub-id pub-id-type="doi">10.1109/ICCV51070.2023.00371</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Eskridge</surname>
              <given-names>JB</given-names>
            </name>
            <name name-style="western">
              <surname>Wick</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Perrigin</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>The Hirschberg test: a double-masked clinical evaluation</article-title>
          <source>Am J Optom Physiol Opt</source>
          <year>1988</year>
          <volume>65</volume>
          <issue>9</issue>
          <fpage>745</fpage>
          <lpage>750</lpage>
          <pub-id pub-id-type="medline">3056019</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hammel</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Sullivan-Molina</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>bdhammel/least-squares-ellipse-fitting: initial release</article-title>
          <source>Zenodo</source>
          <year>2019</year>
          <access-date>2019-02-27</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://zenodo.org/records/2578663">https://zenodo.org/records/2578663</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Halir</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Flusser</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Numerically stable direct least squares fitting of ellipses</article-title>
          <year>1998</year>
          <conf-name>Proceedings of the 6th International Conference in Central Europe on Computer Graphics and Visualization</conf-name>
          <conf-date>February 13, 1998</conf-date>
          <conf-loc>Pilsen (Plzen), Czech Republic</conf-loc>
          <fpage>125</fpage>
          <lpage>132</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cestari</surname>
              <given-names>DM</given-names>
            </name>
            <name name-style="western">
              <surname>Hunter</surname>
              <given-names>DG</given-names>
            </name>
          </person-group>
          <source>Learning Strabismus Surgery: A Case-Based Approach</source>
          <year>2012</year>
          <publisher-loc>Ambler, PA</publisher-loc>
          <publisher-name>Lippincott Williams &#38; Wilkins</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="web">
          <article-title>International Vocabulary of Metrology, Fourth edition—(VIM4 CD)</article-title>
          <source>Joint Committee for Guides in Metrology</source>
          <year>2021</year>
          <access-date>2025-10-28</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.bipm.org/documents/20126/54295284/VIM4_CD_210111c.pdf">https://www.bipm.org/documents/20126/54295284/VIM4_CD_210111c.pdf</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>de Jongh</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Leach</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Tjon-Fo-Sang</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Bjerre</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Inter-examiner variability and agreement of the alternate prism cover test (APCT) measurements of strabismus performed by 4 examiners</article-title>
          <source>Strabismus</source>
          <year>2014</year>
          <volume>22</volume>
          <issue>4</issue>
          <fpage>158</fpage>
          <lpage>166</lpage>
          <pub-id pub-id-type="doi">10.3109/09273972.2014.972521</pub-id>
          <pub-id pub-id-type="medline">25360761</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hatt</surname>
              <given-names>SR</given-names>
            </name>
            <name name-style="western">
              <surname>Leske</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Liebermann</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Mohney</surname>
              <given-names>BG</given-names>
            </name>
            <name name-style="western">
              <surname>Holmes</surname>
              <given-names>JM</given-names>
            </name>
          </person-group>
          <article-title>Variability of angle of deviation measurements in children with intermittent exotropia</article-title>
          <source>J AAPOS</source>
          <year>2012</year>
          <volume>16</volume>
          <issue>2</issue>
          <fpage>120</fpage>
          <lpage>124</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/22525165"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.jaapos.2011.11.008</pub-id>
          <pub-id pub-id-type="medline">22525165</pub-id>
          <pub-id pub-id-type="pii">S1091-8531(12)00074-2</pub-id>
          <pub-id pub-id-type="pmcid">PMC3895466</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <collab>Pediatric Eye Disease Investigator Group</collab>
          </person-group>
          <article-title>Interobserver reliability of the prism and alternate cover test in children with esotropia</article-title>
          <source>Arch Ophthalmol</source>
          <year>2009</year>
          <volume>127</volume>
          <issue>1</issue>
          <fpage>59</fpage>
          <lpage>65</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/19139339"/>
          </comment>
          <pub-id pub-id-type="doi">10.1001/archophthalmol.2008.548</pub-id>
          <pub-id pub-id-type="medline">19139339</pub-id>
          <pub-id pub-id-type="pii">127/1/59</pub-id>
          <pub-id pub-id-type="pmcid">PMC2629143</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wong</surname>
              <given-names>DS</given-names>
            </name>
            <name name-style="western">
              <surname>Alsaif</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bender</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>The role of telemedicine in strabismus assessment: a narrative review and meta-analysis</article-title>
          <source>Telemed J E Health</source>
          <year>2024</year>
          <volume>30</volume>
          <issue>8</issue>
          <fpage>e2240</fpage>
          <lpage>e2255</lpage>
          <pub-id pub-id-type="doi">10.1089/tmj.2024.0115</pub-id>
          <pub-id pub-id-type="medline">38916770</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cheng</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Lynn</surname>
              <given-names>MH</given-names>
            </name>
            <name name-style="western">
              <surname>Pundlik</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Almeida</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Luo</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Houston</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>A smartphone ocular alignment measurement app in school screening for strabismus</article-title>
          <source>BMC Ophthalmol</source>
          <year>2021</year>
          <volume>21</volume>
          <issue>1</issue>
          <fpage>150</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmcophthalmol.biomedcentral.com/articles/10.1186/s12886-021-01902-w"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12886-021-01902-w</pub-id>
          <pub-id pub-id-type="medline">33765984</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12886-021-01902-w</pub-id>
          <pub-id pub-id-type="pmcid">PMC7992982</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Zhu</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Sun</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Yu</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Zhao</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Dong</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Ma</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>X</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Prevalence of strabismus among preschool children in eastern China and comparison at a 5-year interval: a population-based cross-sectional study</article-title>
          <source>BMJ Open</source>
          <year>2021</year>
          <volume>11</volume>
          <issue>10</issue>
          <fpage>e055112</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://bmjopen.bmj.com/lookup/pmidlookup?view=long&#38;pmid=34667017"/>
          </comment>
          <pub-id pub-id-type="doi">10.1136/bmjopen-2021-055112</pub-id>
          <pub-id pub-id-type="medline">34667017</pub-id>
          <pub-id pub-id-type="pii">bmjopen-2021-055112</pub-id>
          <pub-id pub-id-type="pmcid">PMC8527110</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
