<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JFR</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Form Res</journal-id>
      <journal-title>JMIR Formative Research</journal-title>
      <issn pub-type="epub">2561-326X</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v6i4e18222</article-id>
      <article-id pub-id-type="pmid">35451963</article-id>
      <article-id pub-id-type="doi">10.2196/18222</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Integration of Augmented Reality and Brain-Computer Interface Technologies for Health Care Applications: Exploratory and Prototyping Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Mavragani</surname>
            <given-names>Amaryllis</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Davison</surname>
            <given-names>Karen</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Ozkan</surname>
            <given-names>Mehmet</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Söbke</surname>
            <given-names>Heinrich</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Andrews</surname>
            <given-names>Anya</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Department of Internal Medicine</institution>
            <institution>College of Medicine</institution>
            <institution>University of Central Florida</institution>
            <addr-line>6900 Lake Nona Blvd</addr-line>
            <addr-line>Orlando, FL, 32827</addr-line>
            <country>United States</country>
            <phone>1 407 266 7077</phone>
            <email>anya.andrews@ucf.edu</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-9205-5276</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Internal Medicine</institution>
        <institution>College of Medicine</institution>
        <institution>University of Central Florida</institution>
        <addr-line>Orlando, FL</addr-line>
        <country>United States</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Anya Andrews <email>anya.andrews@ucf.edu</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <month>4</month>
        <year>2022</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>21</day>
        <month>4</month>
        <year>2022</year>
      </pub-date>
      <volume>6</volume>
      <issue>4</issue>
      <elocation-id>e18222</elocation-id>
      <history>
        <date date-type="received">
          <day>12</day>
          <month>2</month>
          <year>2020</year>
        </date>
        <date date-type="rev-request">
          <day>26</day>
          <month>10</month>
          <year>2020</year>
        </date>
        <date date-type="rev-recd">
          <day>28</day>
          <month>1</month>
          <year>2021</year>
        </date>
        <date date-type="accepted">
          <day>24</day>
          <month>1</month>
          <year>2022</year>
        </date>
      </history>
      <copyright-statement>©Anya Andrews. Originally published in JMIR Formative Research (https://formative.jmir.org), 21.04.2022.</copyright-statement>
      <copyright-year>2022</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Formative Research, is properly cited. The complete bibliographic information, a link to the original publication on https://formative.jmir.org, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://formative.jmir.org/2022/4/e18222" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Augmented reality (AR) and brain-computer interface (BCI) are promising technologies that have a tremendous potential to revolutionize health care. While there has been a growing interest in these technologies for medical applications in the recent years, the combined use of AR and BCI remains a fairly unexplored area that offers significant opportunities for improving health care professional education and clinical practice. This paper describes a recent study to explore the integration of AR and BCI technologies for health care applications.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>The described effort aims to advance an understanding of how AR and BCI technologies can effectively work together to transform modern health care practice by providing new mechanisms to improve patient and provider learning, communication, and shared decision-making.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>The study methods included an environmental scan of AR and BCI technologies currently used in health care, a use case analysis for a combined AR-BCI capability, and development of an integrated AR-BCI prototype solution for health care applications.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The study resulted in a novel interface technology solution that enables interoperability between consumer-grade wearable AR and BCI devices and provides the users with an ability to control digital objects in augmented reality using neural commands. The article discusses this novel solution within the context of practical digital health use cases developed during the course of the study where the combined AR and BCI technologies are anticipated to produce the most impact.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>As one of the pioneering efforts in the area of AR and BCI integration, the study presents a practical implementation pathway for AR-BCI integration and provides directions for future research and innovation in this area.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>digital health</kwd>
        <kwd>augmented reality</kwd>
        <kwd>brain-computer interface</kwd>
        <kwd>health professional education</kwd>
        <kwd>clinical performance support</kwd>
        <kwd>interprofessional teamwork</kwd>
        <kwd>patient education</kwd>
        <kwd>mHealth</kwd>
        <kwd>mobile health</kwd>
        <kwd>technology integration</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Augmented reality (AR) and brain-computer interface (BCI) technologies are among the most promising technologies to date offering to revolutionize human-computer interaction in health care and health professional education. AR provides a mixed user experience where virtual and real elements seamlessly coexist to allow the user to see the real world supplemented by virtual objects and data. Most modern AR implementations represent a fusion of computer-generated imagery and real environment using a head-mounted display (HMD) or goggles typically used in gaming, maintenance training, rehabilitation, and surgical performance support. An HMD allows the users to maintain a clear line-of-sight alignment with real elements in the actual environment, which can be beneficial in any data-intensive setting [<xref ref-type="bibr" rid="ref1">1</xref>]. This is particularly important in clinical environments where a physician’s situational awareness depends on multiple sources of information and simultaneously maintaining effective communications and eye contact with members of the clinical team as well as the patients [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>].</p>
      <p>The BCI represents a communication pathway between the brain and a computer device using a variety of biosensors that gather and interpret the signals from body and mind to enable neural controls over computer functions. The BCI has been used in a wide variety of applications, including rehabilitation, robotics, entertainment, and virtual reality [<xref ref-type="bibr" rid="ref3">3</xref>-<xref ref-type="bibr" rid="ref6">6</xref>]. No longer seen as a purely assistive technology, BCI has been gaining interest as a noninvasive physiological observation mechanism applicable to health care and education settings [<xref ref-type="bibr" rid="ref7">7</xref>]. AR provides an opportunity to integrate feedback into a real-world environment and enhance a user experience by advancing human-computer interaction capabilities, while the BCI enables a new hands-free interaction modality and provides information about the user’s mental state, which supports adaptive training and performance improvement [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref9">9</xref>].</p>
      <p>While there has been a growing interest in the AR and BCI technologies in the recent years, the combined use of these technologies remains a relatively uncharted territory both in research and practice. The last decade has brought significant advancements in the area of AR and BCI technologies; however, both of them still exist in a relative isolation from each other. While the idea of bringing these two technologies together has prevailed among futurists, technology enthusiasts, and government research silos for quite some time, it still remains a fairly unexplored area, which had been associated with the realm of science fiction requiring paradigm shifts in digital innovation dynamics [<xref ref-type="bibr" rid="ref10">10</xref>].</p>
      <p>As the researchers and consumers are starting to recognize the benefits for combining BCI and AR fields, this interest continues to fuel the innovation around improved technology interaction and visualization capabilities. There is a growing body of literature suggesting the potential to revolutionize health care through the use of emerging AR and BCI technologies, with a few intriguing examples starting to demonstrate the applicability of these emerging technologies in a variety of health care contexts; for example, surgery, ophthalmology, elderly care, sensory system rehabilitation, and others [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref11">11</xref>-<xref ref-type="bibr" rid="ref13">13</xref>]. Despite the growing interest in the AR and BCI technologies, the research in this area is currently somewhat fragmented, and the awareness of the true potential of AR and BCI is still rather limited [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref14">14</xref>].</p>
      <p>A combination of AR and BCI technologies can offer an enhanced user experience both for patients and health care professionals, particularly from procedure-intensive specialties, by allowing them to interact with a mix of real and virtual objects, contextual elements, and each other, while using the BCI as an additional communication vehicle, besides the spoken word and hand gesture traditionally used in AR. For instance, through interactive 3D visualizations, an AR component can be used to help a health professional explain a disease or a medical procedure to a patient during a clinical encounter or provide visual cues to a physician during a complex procedure, while a BCI component can simultaneously use biosensors, such as an electroencephalogram (EEG), to enhance the range of options for performing clinical tasks through a combination of verbal, tactile, and neural triggers as well as provide new information about the user’s mental state.</p>
      <p>This paper presents the results of a recent effort aimed to advance an understanding of how AR and BCI technologies can work together to transform modern health professional education and clinical practice by providing practical mechanisms to support the established principles of patient-centered care [<xref ref-type="bibr" rid="ref15">15</xref>] as they relate to patient safety, effective patient-provider communication, shared decision-making, and patient education. The aim of this study was to explore the integration of commercially available wearable AR and BCI technologies that can be applied in medical education, clinical practice, and other areas to address a variety of real-world challenges in health care. The study produced a novel integrated AR-BCI technology solution, which was demonstrated within the context of practical use cases focused on health professional education and clinical performance support.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Methods Overview</title>
        <p>The study methodology included an environmental scan and analysis of modern AR and BCI technologies, a use case analysis of practical applications for combined use of AR-BCI technologies in health care, and the development of a proof-of-concept AR-BCI technology integration prototype situated within the modeled use case scenarios, as summarized in the following paragraphs.</p>
      </sec>
      <sec>
        <title>Environmental Scan and Analysis of Modern AR and BCI Technologies in Health Care</title>
        <p>The environmental scan [<xref ref-type="bibr" rid="ref16">16</xref>] and analysis component constituted a broad-scale review of existing applications of AR and BCI technologies in health care through literature review, research and industry reports, technology demonstrations in health care settings, and other sources. The literature review included publications identified from health care and technology research databases (eg, PubMed, IEEE, EBSCO, and others) using the following Medical Subject Headings (MeSH) terms: “augmented reality,” “brain-computer interfacing,” “healthcare,” “clinical performance support,” and “health professional education.” The survey of industry reports and technology demonstrations was performed at a number of major technology innovation venues (eg, Consumer Electronics Show, Interservice/Industry Training, Simulation, and Education Conference, Healthcare Information and Management Systems Society, International Meeting for Simulation in Healthcare, and others). The environmental scan revealed the strong potential for bringing together the AR and BCI technologies for health care applications, particularly within the context of complex medical interventions and treatment planning (eg, surgery, invasive testing procedures, intensive therapies, and others). Leveraging the combination of AR and BCI in such cases would help improve communication and shared decision-making between providers and patients as well as members of an interprofessional team. At the same time, the environmental scan has confirmed that while the use of AR and BCI technologies in health care is growing, their combined use remains an unexplored area where the majority of innovations currently reside in research laboratories and apply to a limited range of clinical applications and disease conditions.</p>
      </sec>
      <sec>
        <title>Use Case Analysis of Practical Applications for Combined AR-BCI Technologies in Health Care</title>
        <p>To explore the potential for the combined use of AR and BCI technologies in health care, a use case analysis technique [<xref ref-type="bibr" rid="ref17">17</xref>] was used, which helped identify the requirements for the AR-BCI within the context of practical health care applications. Through multidisciplinary collaboration with experts from health professional education, clinical sciences, and computer and cognitive sciences, a series of use cases were developed, which focused on the following key areas where the combined use of AR-BCI technologies can produce the most impact:</p>
        <list list-type="bullet">
          <list-item>
            <p>Medical or health professional education</p>
          </list-item>
          <list-item>
            <p>Patient education</p>
          </list-item>
          <list-item>
            <p>Patient-provider communication</p>
          </list-item>
          <list-item>
            <p>Shared decision-making</p>
          </list-item>
          <list-item>
            <p>Clinical performance support</p>
          </list-item>
          <list-item>
            <p>Interprofessional teamwork</p>
          </list-item>
        </list>
        <p>These use cases provided the basis for modeling the simulation scenarios used to demonstrate and validate the AR-BCI proof-of-concept technology, which is described in the <italic>Results</italic> section.</p>
      </sec>
      <sec>
        <title>AR-BCI Technology Integration Prototype Development</title>
        <p>The prototype development effort involved a proof-of-concept integration of AR-BCI technologies with the intent of demonstrating the potential of the combined technologies within the context of the practical use cases and serve as a test bed for future use case scenarios and implementation. The prototype development effort focused on the integration of commercially available consumer AR and BCI devices to minimize the common barriers associated with the use of specialized technologies, which frequently stand in the way of technology implementation and adoption.</p>
        <p>The proof-of-concept AR-BCI integration was performed using Microsoft HoloLens as the AR technology component and NeuroSky Mind Wave 2 as the BCI component. While neither of the two devices were designed to work together “out of the box” in a plug-and-play fashion, they do come equipped with a software development kit and application programming interface components, which make integration with other technology platforms and devices possible in principle. It is important to note, however, that coupling these devices involved a technology development and programming effort to create a software interface to enable the communication between them. A WebSocket relay server was implemented as an intermediary component between HoloLens and NeuroSky Mind Wave 2 because both devices support the internet connection over an HTTPS internet protocol. The WebSocket protocol was selected on the basis of its effective real-time performance as a relay messenger, which, in this case, listened for the messages from the BCI device (NeuroSky) and relayed them to the connected AR device (HoloLens). As part of the prototype validation efforts, this technology integration method was also successfully coupled with other consumer-grade BCI or neurosensing devices, such as MUSE, and is currently being extended to other AR devices, such as Magic Leap and MERGE.</p>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <p>The principal outcome of this exploratory assessment and prototype development is that the new technology interface that resulted from it enables the coupling and communication between these devices—a capability that previously did not exist. Specifically, the study resulted in a proof-of-concept integration of mainstream consumer AR and BCI technologies and development of a novel integrated technology platform to demonstrate their potential within the context of medical education and clinical performance support use cases and serve as a test bed, based on which future developments can be performed and evaluated.</p>
      <p>Named “<italic>Augmented Reality and Neurosensing Interaction Environment (ARNIE)</italic>,” the resultant solution represents a flexible AR-BCI interface platform and a technology test bed that is specifically intended for consumer-grade devices and is technology brand–agnostic. This solution is designed to enable an enhanced learning experience for health professionals in training and enhanced clinical experience for patients and physicians. The ARNIE platform currently enables thought-controlled manipulation of multiple virtual and data objects in AR similar to a simple computer “mouse click.” <xref rid="figure1" ref-type="fig">Figure 1</xref> illustrates a working model of the ARNIE system within the context of a medical education use case represented via two learning modules—one focused on the cardiovascular disease and the other one on Crohn disease.</p>
      <p>The ARNIE system enables the 3D models visualizing a human heart and a gastrointestinal tract to be controlled by a neurosensing BCI component using a unique communication protocol developed to enable coupling of AR-BCI components. As pictured in <xref rid="figure1" ref-type="fig">Figure 1</xref>, the student demonstrates wearing an AR headset integrated with a BCI headband and controls the virtual objects—that is, the human heart and gastrointestinal tract—in the AR environment entirely with his thoughts. Specifically, by concentrating on a particular virtual object—for example, “the heart” presented in the AR environment—the student’s EEG waves are captured by the BCI headband, and upon reaching a predetermined attention measure threshold, send a signal using the communication protocol to launch a training video illustrating the functions of the cardiovascular system.</p>
      <fig id="figure1" position="float">
        <label>Figure 1</label>
        <caption>
          <p>Augmented reality and brain-computer interface (AR-BCI) proof-of-concept integration prototype: medical education use case. ARNIE: Augmented Reality and Neurosensing Interaction Environment, UCF: University of Central Florida.</p>
        </caption>
        <graphic xlink:href="formative_v6i4e18222_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <p>Besides the education and training use cases, the potential of the integrated AR-BCI solution was explored within the context of clinical performance support modeled in a health care environment. Thus, <xref rid="figure2" ref-type="fig">Figure 2</xref> below illustrates a clinical scenario where the integrated AR-BCI capability would allow the physician performing a clinical procedure (eg, an ultrasound of the heart) to control certain clinical devices and systems using neural triggers (ie, attention and concentration), use shared visualizations with the patient (also wearing the AR-BCI devices) to promote effective communication and shared decision-making, while also maintaining enhanced clinical awareness of the patient’s mental state enabled by the patient’s BCI component, which can also be used to provide biofeedback to the patient, as needed.</p>
      <fig id="figure2" position="float">
        <label>Figure 2</label>
        <caption>
          <p>Integrated augmented reality and brain-computer interface (AR-BCI) use case: clinical performance support. UCF: University of Central Florida.</p>
        </caption>
        <graphic xlink:href="formative_v6i4e18222_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <p>The clinical applications of the integrated solution can also augment interprofessional teamwork by allowing members of the health care team to control devices and data in the clinical environment via neural triggers, which would help maintain situational awareness and promote shared mental models and shared decision-making as illustrated in <xref rid="figure3" ref-type="fig">Figure 3</xref>.</p>
      <fig id="figure3" position="float">
        <label>Figure 3</label>
        <caption>
          <p>Integrated augmented reality and brain-computer interface (AR-BCI) use case: interprofessional teamwork. UCF: University of Central Florida.</p>
        </caption>
        <graphic xlink:href="formative_v6i4e18222_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </fig>
      <p>Shared mental models are shared cognitive structures that enable members of an interprofessional team to function collaboratively through implicit coordination while performing clinical tasks that require coordination, cooperation, and mutual support. Shared mental models represent a key prerequisite for shared decision-making in interprofessional teamwork.</p>
      <p>The medical education, clinical performance support, and interprofessional teamwork use case scenarios described above were modeled within the academic technology research and clinical skills simulation environment and provided the context for demonstrating the potential of the integrated AR-BCI prototype solution. The medical education use case scenario illustrated in <xref rid="figure1" ref-type="fig">Figure 1</xref> has served as the primary mechanism for demonstrating and validating the functionality of the prototype. The initial testing and validation of the technology prototype was performed within a small group of approximately 10 health care simulation and multimedia technology experts familiar with a variety of both AR and BCI platforms whose formative inputs helped calibrate the prototype for broader testing and implementation.</p>
      <p>After the initial testing by the technology experts, the prototype has been demonstrated to a diverse mix of 2500 potential user representatives, including students, educators, and health care and technology professionals, approximately 600 of whom volunteered to further examine and perform hands-on testing of the integrated AR-BCI solution within the simulation laboratory. The testing volunteers were instructed to don both the HoloLens and NeuroSky Mind Wave 2 devices simultaneously and concentrate their attention on specific 3D objects within the AR environment; for example, “the heart” in order to activate the learning content associated with these objects via neural triggers. All of the volunteers have been able to go through the use case scenarios using the two devices without any problems, which indicates that the technical targets of the integrated AR-BCI system and its individual components have been met under basic operating conditions within the testing environment. The volunteers’ reactions and feedback regarding the overall AR-BCI integration concept and the experience using the prototype have been overwhelmingly positive, ranging from comparisons with science fiction coming to real life to sincere expressions of awe from being introduced to a novel capability that holds tremendous potential for health care applications.</p>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Principal Findings</title>
        <p>This study has successfully proved the concept of integrating commercially available consumer AR and BCI technologies. The results of the described effort present new advancements in the areas of cognitive and computer sciences by providing new capabilities for (1) human-machine interfacing, (2) advanced technology interoperability and Internet of Things networking, (3) multimodal data analytics, and (4) smart and mobile learning technologies in health care. These new capabilities have been realized within the ARNIE technology interface solution that enables interoperability between consumer-grade AR and BCI devices demonstrated within the context of the practical use cases for health professional education and clinical performance support. The proof-of-concept demonstration scenarios involved participation of a broad community of potential end users, including physicians, allied health professionals, medical students, technologists, scientists and researchers, health care administrators who helped validate the integrated AR-BCI technology capability and provided early feedback regarding the prototype, which has been consistently optimistic and supportive in terms of its perceived usability and utility, and also encouraging in terms of its broader testing and implementation in real-life settings.</p>
        <p>The resultant new technology solution offers a research test bed and enabler for advancing knowledge and understanding about human-computer interaction in health care by creating an opportunity to connect people (health professionals, patients, and trainees), systems, and data in health care environments through a combination of AR and BCI. This test bed represents a foundation for moving toward plug-and-play integrated AR-BCI technology interoperability, which currently represents a significant barrier to adoption of these new technologies for health care applications.</p>
      </sec>
      <sec>
        <title>Limitations</title>
        <p>The study represents an exploratory and developmental technology integration and use case modeling effort, which, thus far, has been successfully demonstrated and implemented in a simulation-based education and research setting, but not in an actual health care delivery environment. The focus of the effort was on technology development and proof-of-concept demonstration of the integrated AR-BCI capability within the realistic use case scenarios. The volunteer participant interaction with the platform was not systematic, and their feedback is considered informal.</p>
      </sec>
      <sec>
        <title>Comparison With Prior Work</title>
        <p>The following three major differentiators of the described study from the current state of the science in this area can be distinguished:</p>
        <sec>
          <title>Expanded Range of Controls for AR Technology Interaction</title>
          <p>The world of human-machine interfaces is rapidly transitioning from legacy physical instrumentation to a world driven by gesture, spoken word, and now neural command, which is likely to become far more precise than gesture or spoken word in the future. The results of this study demonstrate an expanded range of AR controls, which includes a neurosensing capability that allows the users to control digital objects in an AR environment using the power of their mind.</p>
        </sec>
        <sec>
          <title>Consumer-Grade AR-BCI Technologies and Broader Application Focus</title>
          <p>A growing body of research suggests the great potential to revolutionize human-computer interaction through the use of emerging AR and BCI technologies, with several early examples starting to demonstrate the applicability of these emerging technologies in a variety of health care contexts, including prosthetic interfaces [<xref ref-type="bibr" rid="ref18">18</xref>], sensory system rehabilitation [<xref ref-type="bibr" rid="ref19">19</xref>], behavioral health [<xref ref-type="bibr" rid="ref20">20</xref>], and others. However, many existing combined AR-BCI implementations use highly specialized technologies and devices that are custom-built for a narrow focus, experimental in nature, and may not be easily adaptable or extensible for wider use, which makes it challenging for them to cross a so-called technology adoption chasm [<xref ref-type="bibr" rid="ref21">21</xref>] in order to enter mainstream use any time soon. The ARNIE solution brings together the AR-BCI capabilities using the consumer-grade technology and devices available today with the intent to accelerate the transition of this new integrated capability to the consumers in health care in the near future.</p>
        </sec>
        <sec>
          <title>Technology-Agnostic Solution</title>
          <p>The described effort represents a new step toward bringing the AR-BCI technologies together in a device-agnostic way via a novel interface solution that enables communication between consumer-grade wearable AR and BCI devices and provides the user an ability to control digital objects in AR using neural commands. Envisioned to promote effective communication and shared decision-making between health care providers and patients, this new interface represents an extensible and device-agnostic test bed for evaluating future development efforts in this area. Intended to support a wide range of AR and BCI devices, the technology-agnostic integration approach will help promote the adoption of integrated AR-BCI technologies in health care.</p>
        </sec>
      </sec>
      <sec>
        <title>Future Directions</title>
        <p>Next steps in this direction would include the development of an expanded set of integrated AR-BCI capabilities, as well as further testing and implementation of this platform within the health professional education curriculum of the University of Central Florida (UCF) Academic Health Sciences Center. Further research efforts need to advance an understanding of how AR and BCI technologies can be used to (1) facilitate or enhance shared understanding between human agents in health care, (2) support teaching and learning of complex biomedical and medical information, (3) enable strategies to capture mental states and promote metacognition and comprehension in medical trainees and patient populations, and (4) support the identification of neural signatures of complex cognitive, metacognitive, and affective processes during clinical training and performance.</p>
      </sec>
      <sec>
        <title>Broader Impacts</title>
        <p>There are many opportunities for broader impacts from the application of integrated AR-BCI solutions in health care, including patient and provider education, physician clinical performance support, interprofessional teamwork, rehabilitation, wellness, telemedicine, and research, as summarized in <xref rid="figure4" ref-type="fig">Figure 4</xref>. The results of this study can play an important role in transforming health professions education by enabling the integration of people, systems, and data during the learning process. They can also serve as a powerful enabler for enhanced clinical experiences for patients and providers by improving communication, shared decision-making, and patient health literacy, all of which are associated with patient-centered care [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref23">23</xref>]. The clinical applications of the integrated solution can also augment interprofessional teamwork and help clinical teams maintain situational awareness and shared mental models, which also have a direct impact on patient health outcomes [<xref ref-type="bibr" rid="ref24">24</xref>]. Additionally, the integration of AR and BCI technologies offers unique opportunities for supporting telemedicine and remote rehabilitation and wellness approaches, which have become particularly important during the COVID-19 pandemic. Finally, the AR-BCI value proposition includes opportunities to advance research in the areas of health care quality improvement and the other health care application areas highlighted in <xref rid="figure4" ref-type="fig">Figure 4</xref>.</p>
        <fig id="figure4" position="float">
          <label>Figure 4</label>
          <caption>
            <p>Augmented reality and brain-computer interface (AR-BCI) integration for health care applications: value proposition.</p>
          </caption>
          <graphic xlink:href="formative_v6i4e18222_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>The described study offers a foundation for future efforts to accelerate the integration of AR and BCI technologies to connect people, data, and systems to enable transformation in health and medicine. As our society continues to become more diverse and global, every effort should be made toward the development of shared understanding in health care [<xref ref-type="bibr" rid="ref25">25</xref>]. The results of this study and future efforts in this area should help promote shared understanding between health professionals and patients from all walks of life, including underrepresented minorities, patients with limited educational backgrounds, immigrants, patients in rural communities, and others. A famous quote by a historical figure in medicine, Martin H Fischer, stating that &#8220;<italic>In the sick room, ten cents' worth of human understanding equals ten dollars' worth of medical science</italic>,&#8221; still holds true today, particularly as patient-centered care remains one of the fundamental aims of the US health care system. Whether it is about communication between providers and patients or between members of an interprofessional team, the development of shared understanding and mental models is an important prerequisite and enabler for shared decision-making.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>Technology-based innovations can serve as a game-changer for supporting patient and provider education, communication, and shared decision-making, which would improve care and engagement of patients and ultimately population health [<xref ref-type="bibr" rid="ref22">22</xref>]. Through the purposeful integration of multiple disciplines, including cognitive and computer sciences, engineering, and medicine, this study explored the integration of wearable AR and BCI technologies and resulted in a novel integrated AR and BCI technology solution and test bed that brings together two of the most promising technologies to date, both of which have tremendous potential to revolutionize health care. Broad applications for this technology are anticipated in health professions education, clinical performance support, surgery, telemedicine, and patient education. Future research directions in this area should aim to expand the range of current AR-BCI capabilities while addressing existing technical challenges and also generate new evidence on human-computer interaction in health care and health professional education.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group/>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">AR</term>
          <def>
            <p>augmented reality</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">ARNIE</term>
          <def>
            <p>Augmented Reality and Neurosensing Interaction Environment</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">BCI</term>
          <def>
            <p>brain-computer interface</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">EEG</term>
          <def>
            <p>electroencephalogram</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">HMD</term>
          <def>
            <p>head-mounted display</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">UCF</term>
          <def>
            <p>University of Central Florida</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This study was funded by a seed grant program sponsored by the Department of Internal Medicine, University of Central Florida College of Medicine. I would like to thank Dr David Metcalf and Dr Analia Castiglioni for scholarly collaboration, Mr Mike Eakins and Mr Jarod Smith for technical contributions, the UCF COM EdTech team for digital photography, and Dr Edward Ross for departmental support during the study.</p>
    </ack>
    <fn-group>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vávra</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Roman</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Zonča</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Ihnát</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Němec</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Kumar</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Habib</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>El-Gendi</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Recent Development of Augmented Reality in Surgery: A Review</article-title>
          <source>J Healthc Eng</source>
          <year>2017</year>
          <volume>2017</volume>
          <fpage>4574172</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1155/2017/4574172"/>
          </comment>
          <pub-id pub-id-type="doi">10.1155/2017/4574172</pub-id>
          <pub-id pub-id-type="medline">29065604</pub-id>
          <pub-id pub-id-type="pmcid">PMC5585624</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lahanas</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Loukas</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Smailis</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Georgiou</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>A novel augmented reality simulator for skills assessment in minimal invasive surgery</article-title>
          <source>Surg Endosc</source>
          <year>2015</year>
          <month>08</month>
          <volume>29</volume>
          <issue>8</issue>
          <fpage>2224</fpage>
          <lpage>2234</lpage>
          <pub-id pub-id-type="doi">10.1007/s00464-014-3930-y</pub-id>
          <pub-id pub-id-type="medline">25303925</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Alimardani</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Hiraki</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Passive Brain-Computer Interfaces for Enhanced Human-Robot Interaction</article-title>
          <source>Front Robot AI</source>
          <year>2020</year>
          <volume>7</volume>
          <fpage>125</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3389/frobt.2020.00125"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/frobt.2020.00125</pub-id>
          <pub-id pub-id-type="medline">33501291</pub-id>
          <pub-id pub-id-type="pmcid">PMC7805996</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mane</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Chouhan</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Guan</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>BCI for stroke rehabilitation: motor and beyond</article-title>
          <source>J Neural Eng</source>
          <year>2020</year>
          <month>08</month>
          <day>17</day>
          <volume>17</volume>
          <issue>4</issue>
          <fpage>041001</fpage>
          <pub-id pub-id-type="doi">10.1088/1741-2552/aba162</pub-id>
          <pub-id pub-id-type="medline">32613947</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mudgal</surname>
              <given-names>SK</given-names>
            </name>
            <name name-style="western">
              <surname>Sharma</surname>
              <given-names>SK</given-names>
            </name>
            <name name-style="western">
              <surname>Chaturvedi</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Sharma</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Brain computer interface advancement in neurosciences: Applications and issues</article-title>
          <source>Interdiscip Neurosurg</source>
          <year>2020</year>
          <month>06</month>
          <volume>20</volume>
          <fpage>100694</fpage>
          <pub-id pub-id-type="doi">10.1016/j.inat.2020.100694</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wen</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Liang</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Zhou</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Jung</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>The Current Research of Combining Multi-Modal Brain-Computer Interfaces With Virtual Reality</article-title>
          <source>IEEE J Biomed Health Inform</source>
          <year>2021</year>
          <month>09</month>
          <volume>25</volume>
          <issue>9</issue>
          <fpage>3278</fpage>
          <lpage>3287</lpage>
          <pub-id pub-id-type="doi">10.1109/JBHI.2020.3047836</pub-id>
          <pub-id pub-id-type="medline">33373308</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Galway</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>McCullagh</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Lightbody</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Brennan</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Trainor</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>The Potential of the Brain-Computer Interface for Learning: A Technology Review</article-title>
          <year>2015</year>
          <conf-name>2015 IEEE International Conference on Computer and Information Technology; Ubiquitous Computing and Communications; Dependable, Autonomic and Secure Computing; Pervasive Intelligence and Computing</conf-name>
          <conf-date>October 26-28, 2015</conf-date>
          <conf-loc>Liverpool</conf-loc>
          <pub-id pub-id-type="doi">10.1109/cit/iucc/dasc/picom.2015.234</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Si-Mohammed</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Petit</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Jeunet</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Argelaguet</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Spindler</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Evain</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Roussel</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Casiez</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Lecuyer</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Towards BCI-Based Interfaces for Augmented Reality: Feasibility, Design and Evaluation</article-title>
          <source>IEEE Trans Vis Comput Graph</source>
          <year>2020</year>
          <month>03</month>
          <volume>26</volume>
          <issue>3</issue>
          <fpage>1608</fpage>
          <lpage>1621</lpage>
          <pub-id pub-id-type="doi">10.1109/TVCG.2018.2873737</pub-id>
          <pub-id pub-id-type="medline">30295623</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Acar</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Miman</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Akirmak</surname>
              <given-names>OO</given-names>
            </name>
          </person-group>
          <article-title>Treatment of Anxiety Disorders Patients through EEG and Augmented Reality</article-title>
          <source>Eur Soc Sci Res J</source>
          <year>2014</year>
          <volume>3</volume>
          <issue>2</issue>
          <fpage>18</fpage>
          <lpage>27</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mainzer</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>From Augmented Reality to the Internet of Things: Paradigm Shifts in Digital Innovation Dynamics</article-title>
          <source>Augmented Reality</source>
          <year>2017</year>
          <publisher-loc>Berlin</publisher-loc>
          <publisher-name>De Gruyter</publisher-name>
          <fpage>25</fpage>
          <lpage>40</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Takano</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Hata</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Kansaku</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>Towards intelligent environments: an augmented reality-brain-machine interface operated with a see-through head-mount display</article-title>
          <source>Front Neurosci</source>
          <year>2011</year>
          <volume>5</volume>
          <fpage>60</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3389/fnins.2011.00060"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fnins.2011.00060</pub-id>
          <pub-id pub-id-type="medline">21541307</pub-id>
          <pub-id pub-id-type="pmcid">PMC3082767</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Blum</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Stauder</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Euler</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Navab</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Superman-like X-ray vision: Towards brain-computer interfaces for medical augmented reality</article-title>
          <year>2012</year>
          <conf-name>2012 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)</conf-name>
          <conf-date>November 5-8, 2012</conf-date>
          <conf-loc>Atlanta, GA</conf-loc>
          <pub-id pub-id-type="doi">10.1109/ismar.2012.6402569</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Barresi</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Olivieri</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Caldwell</surname>
              <given-names>DG</given-names>
            </name>
            <name name-style="western">
              <surname>Mattos</surname>
              <given-names>LS</given-names>
            </name>
          </person-group>
          <article-title>Brain-Controlled AR Feedback Design for User's Training in Surgical HRI</article-title>
          <year>2015</year>
          <conf-name>2015 IEEE International Conference on Systems, Man, and Cybernetics</conf-name>
          <conf-date>October 9-12, 2015</conf-date>
          <conf-loc>Hong Kong</conf-loc>
          <pub-id pub-id-type="doi">10.1109/smc.2015.200</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Barsom</surname>
              <given-names>EZ</given-names>
            </name>
            <name name-style="western">
              <surname>Graafland</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Schijven</surname>
              <given-names>MP</given-names>
            </name>
          </person-group>
          <article-title>Systematic review on the effectiveness of augmented reality applications in medical training</article-title>
          <source>Surg Endosc</source>
          <year>2016</year>
          <month>10</month>
          <volume>30</volume>
          <issue>10</issue>
          <fpage>4174</fpage>
          <lpage>4183</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://europepmc.org/abstract/MED/26905573"/>
          </comment>
          <pub-id pub-id-type="doi">10.1007/s00464-016-4800-6</pub-id>
          <pub-id pub-id-type="medline">26905573</pub-id>
          <pub-id pub-id-type="pii">10.1007/s00464-016-4800-6</pub-id>
          <pub-id pub-id-type="pmcid">PMC5009168</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Davis</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Schoenbaum</surname>
              <given-names>SC</given-names>
            </name>
            <name name-style="western">
              <surname>Audet</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>A 2020 vision of patient-centered primary care</article-title>
          <source>J Gen Intern Med</source>
          <year>2005</year>
          <month>10</month>
          <volume>20</volume>
          <issue>10</issue>
          <fpage>953</fpage>
          <lpage>957</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://onlinelibrary.wiley.com/resolve/openurl?genre=article&#38;sid=nlm:pubmed&#38;issn=0884-8734&#38;date=2005&#38;volume=20&#38;issue=10&#38;spage=953"/>
          </comment>
          <pub-id pub-id-type="doi">10.1111/j.1525-1497.2005.0178.x</pub-id>
          <pub-id pub-id-type="medline">16191145</pub-id>
          <pub-id pub-id-type="pii">JGI178</pub-id>
          <pub-id pub-id-type="pmcid">PMC1490238</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Graham</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Evitts</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Thomas-MacLean</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Environmental scans: how useful are they for primary care research?</article-title>
          <source>Can Fam Physician</source>
          <year>2008</year>
          <month>07</month>
          <volume>54</volume>
          <issue>7</issue>
          <fpage>1022</fpage>
          <lpage>1023</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://www.cfp.ca/cgi/pmidlookup?view=long&#38;pmid=18625830"/>
          </comment>
          <pub-id pub-id-type="medline">18625830</pub-id>
          <pub-id pub-id-type="pii">54/7/1022</pub-id>
          <pub-id pub-id-type="pmcid">PMC2464800</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sharp</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Requirements modeling with use cases and services</article-title>
          <source>Workflow Modeling: Tools for Process Improvement and Applications Development</source>
          <year>2009</year>
          <publisher-loc>Norwood, MA</publisher-loc>
          <publisher-name>Artech House</publisher-name>
          <fpage>375</fpage>
          <lpage>422</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zeng</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Song</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Ji</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Xu</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Zhu</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Li</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Wen</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Closed-Loop Hybrid Gaze Brain-Machine Interface Based Robotic Arm Control with Augmented Reality Feedback</article-title>
          <source>Front Neurorobot</source>
          <year>2017</year>
          <volume>11</volume>
          <fpage>60</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.3389/fnbot.2017.00060"/>
          </comment>
          <pub-id pub-id-type="doi">10.3389/fnbot.2017.00060</pub-id>
          <pub-id pub-id-type="medline">29163123</pub-id>
          <pub-id pub-id-type="pmcid">PMC5671634</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Cervera</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Soekadar</surname>
              <given-names>SR</given-names>
            </name>
            <name name-style="western">
              <surname>Ushiba</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Millán</surname>
              <given-names>JDR</given-names>
            </name>
            <name name-style="western">
              <surname>Liu</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Birbaumer</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Garipelli</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Brain-computer interfaces for post-stroke motor rehabilitation: a meta-analysis</article-title>
          <source>Ann Clin Transl Neurol</source>
          <year>2018</year>
          <month>05</month>
          <volume>5</volume>
          <issue>5</issue>
          <fpage>651</fpage>
          <lpage>663</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1002/acn3.544"/>
          </comment>
          <pub-id pub-id-type="doi">10.1002/acn3.544</pub-id>
          <pub-id pub-id-type="medline">29761128</pub-id>
          <pub-id pub-id-type="pii">ACN3544</pub-id>
          <pub-id pub-id-type="pmcid">PMC5945970</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Luxton</surname>
              <given-names>DD</given-names>
            </name>
            <name name-style="western">
              <surname>June</surname>
              <given-names>JD</given-names>
            </name>
            <name name-style="western">
              <surname>Sano</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bickmore</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Intelligent Mobile, Wearable, and Ambient Technologies for Behavioral Health Care</article-title>
          <source>Artificial Intelligence in Behavioral and Mental Health Care</source>
          <year>2016</year>
          <publisher-loc>Cambridge, MA</publisher-loc>
          <publisher-name>Academic Press</publisher-name>
          <fpage>137</fpage>
          <lpage>162</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mitra</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Gupta</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Crossing the Chasm: Business Process to Information Systems</article-title>
          <source>Knowledge Reuse and Agile Processes: Catalysts for Innovation</source>
          <year>2008</year>
          <publisher-loc>Hershey, PA</publisher-loc>
          <publisher-name>IGI Global</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <collab>National Academies of Sciences, Engineering, and Medicine</collab>
          </person-group>
          <source>Health Literacy and Consumer-Facing Technology</source>
          <year>2015</year>
          <publisher-loc>Washington, DC</publisher-loc>
          <publisher-name>The National Academies Press</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <collab>National Academies of Sciences, Engineering, and Medicine</collab>
          </person-group>
          <source>Improving Health Professional Education and Practice Through Technology: Proceedings of a Workshop</source>
          <year>2018</year>
          <publisher-loc>Washington, DC</publisher-loc>
          <publisher-name>The National Academies Press</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>McComb</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Simpson</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>The concept of shared mental models in healthcare collaboration</article-title>
          <source>J Adv Nurs</source>
          <year>2014</year>
          <month>07</month>
          <volume>70</volume>
          <issue>7</issue>
          <fpage>1479</fpage>
          <lpage>1488</lpage>
          <pub-id pub-id-type="doi">10.1111/jan.12307</pub-id>
          <pub-id pub-id-type="medline">24237202</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <collab>National Academies of Sciences, Engineering, and Medicine</collab>
          </person-group>
          <source>Using Technology to Advance Global Health: Proceedings of a Workshop</source>
          <year>2018</year>
          <publisher-loc>Washington, DC</publisher-loc>
          <publisher-name>The National Academies Press</publisher-name>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
