<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Form Res</journal-id><journal-id journal-id-type="publisher-id">formative</journal-id><journal-id journal-id-type="index">27</journal-id><journal-title>JMIR Formative Research</journal-title><abbrev-journal-title>JMIR Form Res</abbrev-journal-title><issn pub-type="epub">2561-326X</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v9i1e60859</article-id><article-id pub-id-type="doi">10.2196/60859</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Public Perception of the Brain-Computer Interface Based on a Decade of Data on X: Mixed Methods Study</article-title></title-group><contrib-group><contrib contrib-type="author"><name name-style="western"><surname>Almanna</surname><given-names>Mohammed A</given-names></name><degrees>MBBS</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Elkaim</surname><given-names>Lior M</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Alvi</surname><given-names>Mohammed A</given-names></name><degrees>MBBS, MSc</degrees><xref ref-type="aff" rid="aff4">4</xref><xref ref-type="aff" rid="aff5">5</xref><xref ref-type="aff" rid="aff6">6</xref></contrib><contrib contrib-type="author"><name 
name-style="western"><surname>Levett</surname><given-names>Jordan J</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff7">7</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Li</surname><given-names>Ben</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff8">8</xref><xref ref-type="aff" rid="aff9">9</xref><xref ref-type="aff" rid="aff10">10</xref><xref ref-type="aff" rid="aff11">11</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Mamdani</surname><given-names>Muhammad</given-names></name><degrees>MA, MPH, PharmD</degrees><xref ref-type="aff" rid="aff10">10</xref><xref ref-type="aff" rid="aff11">11</xref><xref ref-type="aff" rid="aff12">12</xref><xref ref-type="aff" rid="aff13">13</xref><xref ref-type="aff" rid="aff14">14</xref><xref ref-type="aff" rid="aff15">15</xref><xref ref-type="aff" rid="aff16">16</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Al&#x2011;Omran</surname><given-names>Mohammed</given-names></name><degrees>MSc, MD</degrees><xref ref-type="aff" rid="aff8">8</xref><xref ref-type="aff" rid="aff9">9</xref><xref ref-type="aff" rid="aff10">10</xref><xref ref-type="aff" rid="aff11">11</xref><xref ref-type="aff" rid="aff15">15</xref><xref ref-type="aff" rid="aff17">17</xref><xref ref-type="aff" rid="aff18">18</xref></contrib><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Alotaibi</surname><given-names>Naif M</given-names></name><degrees>MSc, MD</degrees><xref ref-type="aff" rid="aff17">17</xref><xref ref-type="aff" rid="aff19">19</xref></contrib></contrib-group><aff id="aff1"><institution>College of Medicine, King Saud Bin Abdulaziz University for Health Sciences</institution><addr-line>Riyadh</addr-line><country>Saudi Arabia</country></aff><aff id="aff2"><institution>King Abdullah International Medical Research Center</institution><addr-line>Riyadh</addr-line><country>Saudi 
Arabia</country></aff><aff id="aff3"><institution>Department of Neurology and Neurosurgery, McGill University</institution><addr-line>Montreal</addr-line><addr-line>QC</addr-line><country>Canada</country></aff><aff id="aff4"><institution>Division of Neurosurgery, Department of Surgery, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff5"><institution>Neuro International Collaboration</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff6"><institution>Department of Neurologic Surgery, Mayo Clinic</institution><addr-line>Rochester</addr-line><addr-line>MN</addr-line><country>United States</country></aff><aff id="aff7"><institution>Faculty of Medicine, University of Montreal</institution><addr-line>Montreal</addr-line><addr-line>QC</addr-line><country>Canada</country></aff><aff id="aff8"><institution>Department of Surgery, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff9"><institution>Division of Vascular Surgery, St. 
Michael&#x2019;s Hospital, Unity Health Toronto, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff10"><institution>Institute of Medical Science, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff11"><institution>Temerty Centre for Artificial Intelligence Research and Education in Medicine, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff12"><institution>Data Science &#x0026; Advanced Analytics, Unity Health Toronto, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff13"><institution>Institute of Health Policy, Management and Evaluation, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff14"><institution>Institute for Clinical Evaluative Sciences, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff15"><institution>Li Ka Shing Knowledge Institute, St. 
Michael&#x2019;s Hospital, Unity Health Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff16"><institution>Leslie Dan Faculty of Pharmacy, University of Toronto</institution><addr-line>Toronto</addr-line><addr-line>ON</addr-line><country>Canada</country></aff><aff id="aff17"><institution>College of Medicine, Alfaisal University</institution><addr-line>Riyadh</addr-line><country>Saudi Arabia</country></aff><aff id="aff18"><institution>Department of Surgery, King Faisal Specialist Hospital and Research Center</institution><addr-line>Riyadh</addr-line><country>Saudi Arabia</country></aff><aff id="aff19"><institution>National Neuroscience Institute, King Fahad Medical City</institution><addr-line>As Sulimaniyah, Makkah Road, Riyadh 12231</addr-line><addr-line>Riyadh</addr-line><country>Saudi Arabia</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Mavragani</surname><given-names>Amaryllis</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Carvalho</surname><given-names>Darlinton</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Gogna</surname><given-names>Yamini</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Naif M Alotaibi, MSc, MD, National Neuroscience Institute, King Fahad Medical City, As Sulimaniyah, Makkah Road, Riyadh 12231, Riyadh, 59046, Saudi Arabia, 966 50 952 7700; <email>naifalotaibi@kfmc.med.sa</email></corresp></author-notes><pub-date pub-type="collection"><year>2025</year></pub-date><pub-date pub-type="epub"><day>25</day><month>6</month><year>2025</year></pub-date><volume>9</volume><elocation-id>e60859</elocation-id><history><date date-type="received"><day>23</day><month>05</month><year>2024</year></date><date 
date-type="rev-recd"><day>15</day><month>11</month><year>2024</year></date><date date-type="accepted"><day>15</day><month>01</month><year>2025</year></date></history><copyright-statement>&#x00A9; Mohammed A Almanna, Lior M Elkaim, Mohammed A Alvi, Jordan J Levett, Ben Li, Muhammad Mamdani, Mohammed Al&#x2011;Omran, Naif M Alotaibi. Originally published in JMIR Formative Research (<ext-link ext-link-type="uri" xlink:href="https://formative.jmir.org">https://formative.jmir.org</ext-link>), 25.6.2025. </copyright-statement><copyright-year>2025</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Formative Research, is properly cited. 
The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://formative.jmir.org">https://formative.jmir.org</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://formative.jmir.org/2025/1/e60859"/><abstract><sec><title>Background</title><p>Given the recent evolution and achievements in brain-computer interface (BCI) technologies, understanding public perception and sentiments toward such novel technologies is important for guiding their communication strategies in marketing and education.</p></sec><sec><title>Objective</title><p>This study aims to explore the public perception of BCI technology by examining posts on X (formerly known as Twitter) using natural language processing (NLP) methods.</p></sec><sec sec-type="methods"><title>Methods</title><p>A mixed methods study was conducted on BCI-related posts from January 2010 to December 2021. The dataset included 65,340 posts from 38,962 unique users. This dataset was subject to a detailed NLP analysis including VADER, TextBlob, and NRCLex libraries, focusing on quantifying the sentiment (positive, neutral, and negative), the degree of subjectivity, and the range of emotions expressed in the posts. The temporal dynamics of sentiments were examined using the Mann-Kendall trend test to identify significant trends or shifts in public interest over time, based on monthly incidence. We used the Sentiment.ai tool to infer users&#x2019; demographics by matching predefined attributes in users&#x2019; profile biographies to certain demographic groups. We used the BERTopic tool for semantic understanding of discussions related to BCI.</p></sec><sec sec-type="results"><title>Results</title><p>The analysis showed a significant rise in BCI discussions in 2017, coinciding with Elon Musk&#x2019;s announcement of Neuralink. 
Sentiment analysis revealed that 59.38% (38,804/65,340) of posts were neutral, 32.75% (21,404/65,340) were positive, and 7.85% (5132/65,340) were negative. The average polarity score demonstrated a generally positive trend over the course of the study (Mann-Kendall Statistic=0.266; &#x03C4;=0.266; <italic>P</italic>&#x003C;.001). Most posts were objective (50,847/65,340, 77.81%), with a smaller proportion being subjective (14,393/65,340, 22.02%). Biographic analysis showed that the &#x201C;broadcasting&#x201D; group contributed the most to BCI discussions (17,803/58,030, 30.67%), while the &#x201C;scientific&#x201D; group, contributing 27.58% (n=16,005), had the highest overall engagement metrics. The emotional analysis identified anticipation (score=10,802/52,618, 20.52%), trust (score=9244/52,618, 17.56%), and fear (score=7344/52,618, 13.95%) as the most prominent emotions in BCI discussions. Key topics included Neuralink and Elon Musk, practical applications of BCIs, and the potential for gamification.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>This NLP-assisted study provides a decade-long analysis of public perception of BCI technology based on data from X. Overall, sentiments were neutral yet cautiously apprehensive, with anticipation, trust, and fear as the dominant emotions. The presence of fear underscores the need to address ethical concerns, particularly around data privacy, safety, and transparency. Transparent communication and ethical considerations are essential for building public trust and reducing apprehension. Influential figures and positive clinical outcomes, such as advancements in neuroprosthetics, could enhance favorable perceptions. The gamification of BCI, particularly in gaming and entertainment, also offers potential for wider public engagement and adoption. However, public perceptions on X may differ from other platforms, affecting the broader interpretation of results. 
Despite these limitations, the findings provide valuable insights for guiding future BCI developments, policy making, and communication strategies.</p></sec></abstract><kwd-group><kwd>brain-computer interface</kwd><kwd>BCI</kwd><kwd>Neuralink</kwd><kwd>sentiment analysis</kwd><kwd>public perception</kwd><kwd>technology</kwd><kwd>innovation</kwd><kwd>Twitter</kwd><kwd>social media</kwd><kwd>natural language processing</kwd><kwd>NLP</kwd><kwd>mixed method</kwd><kwd>semantic</kwd><kwd>Mann-Kendall</kwd><kwd>brain-machine interface</kwd><kwd>decade</kwd><kwd>data</kwd><kwd>public perceptions</kwd><kwd>sentiment</kwd><kwd>marketing</kwd><kwd>education</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Brain-computer interface (BCI) is an emerging technology that allows for direct communication of the brain&#x2019;s signals to external devices. This innovation operates through four sequential stages: signal acquisition, feature extraction, feature translation, and generating the device output [<xref ref-type="bibr" rid="ref1">1</xref>]. The effectiveness of a BCI system largely depends on its signal acquisition module, which can range from noninvasive methods, such as surface electroencephalography electrodes, to more invasive approaches like endovascular stent-electrode arrays and electrodes implanted on the brain surface. 
While more invasive methods generally allow for higher signal fidelity and better system performance, overall effectiveness also depends on additional factors such as signal processing and user adaptability [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref3">3</xref>].</p><p>BCI has been extensively researched across various fields, including medical rehabilitation [<xref ref-type="bibr" rid="ref4">4</xref>-<xref ref-type="bibr" rid="ref6">6</xref>], control of orthotic and prosthetic devices [<xref ref-type="bibr" rid="ref7">7</xref>,<xref ref-type="bibr" rid="ref8">8</xref>], assistive technologies [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref10">10</xref>], and video gaming [<xref ref-type="bibr" rid="ref11">11</xref>]. Additionally, BCI has potential applications in enhancing cognitive functions [<xref ref-type="bibr" rid="ref12">12</xref>]. The remarkable progress in BCI, along with the involvement of well-recognized institutions and prominent figures like Elon Musk, has brought this technology to the forefront of public awareness. Public perception of BCI technologies is influenced by their representation in the media, with concerns over ethical dilemmas including privacy and mind control, and the invasive nature of certain BCI technologies [<xref ref-type="bibr" rid="ref13">13</xref>-<xref ref-type="bibr" rid="ref18">18</xref>].</p><p>X (formerly known as Twitter) provides real-time insights into the thoughts, feelings, and conversations of millions of users. Natural language processing (NLP) tools are instrumental in analyzing social media content, offering deeper insights into public perception. NLP methods enable the analysis of public sentiment toward specific topics, the detection of emerging trends, and the identification of demographic groups participating in these discussions. 
These tools have been extensively used to assess public acceptance of vaccines [<xref ref-type="bibr" rid="ref19">19</xref>,<xref ref-type="bibr" rid="ref20">20</xref>], guide economic investments [<xref ref-type="bibr" rid="ref21">21</xref>], evaluate innovative products [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref23">23</xref>], and more. BCI is an emerging technology with concepts that many may still consider science fiction, leading to polarized opinions among the public. While some individuals might be excited about its potential applications, others may express concerns due to potential complications and the possibility of malicious uses. Understanding public sentiment toward BCI is crucial for guiding ethical frameworks, informing policy decisions, and shaping the direction of future research and development. However, there is a lack of comprehensive studies assessing public perception of BCI through social media discussions. This study aims to bridge the gap by using NLP tools to analyze over a decade of X conversations about BCI. The goals of this study are to quantify sentiments, identify trends in public perception, explore subjectivity, and understand the nature of public discussions related to BCI.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Data Source and Processing</title><p>We used X application programming interface, Twitter application programming interface for academic research, and database to identify posts related to BCI using the search term &#x201C;brain-computer interface.&#x201D; The acronym &#x201C;BCI&#x201D; was not used to avoid including irrelevant posts. The search was conducted from X inception (March 2006) to May 2022, prior to Elon Musk&#x2019;s acquisition of X. 
The data underwent preprocessing, which involved omitting any mentions, URLs, and hashtags, removing any line breaks, deleting any HTML characters, replacing them with their respective Unicode equivalent, eliminating any special characters or punctuation points except exclamation points (the only punctuation mark relevant for sentiment analysis), and excluding posts from users with fewer than 10 followers to minimize &#x201C;bot&#x201D; influence and duplicate entries. We excluded posts before January 2010 due to limited data availability and after December 2021 to maintain the temporal consistency of the dataset, as our data cover only a few months of 2022 (Figure S1 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Detailed individual post data included the text, date and time of posts, the number of reposts, replies, likes, and quote count. Additional data included whether the post included links, media, tagging, or any hashtags. User information included username, the number of followers, the total number of author posts, user biography, and location.</p></sec><sec id="s2-2"><title>Biography Analysis</title><p>To explore users&#x2019; demographics, we used the sentiment.ai [<xref ref-type="bibr" rid="ref24">24</xref>] library match function after preprocessing user biographies and excluding users with empty biographies. Sentiment.ai is a text-based deep machine learning tool that allows for category matching of the most similar phrase and its category, providing a cosine similarity score. The categories (ie, the biographic groups) and phrases (ie, attributes) are shown in Table S1 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>. We applied categories such as &#x201C;broadcast,&#x201D; &#x201C;scientific,&#x201D; &#x201C;entrepreneurship,&#x201D; and &#x201C;clinical&#x201D; based on predefined attributes. 
We measured the cosine similarity score between profile biographies and the most similar attribute, categorizing them as &#x201C;others&#x201D; if the similarity score is less than 0.05 [<xref ref-type="bibr" rid="ref23">23</xref>] (Figure S2 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p></sec><sec id="s2-3"><title>Sentiment Quantification</title><sec id="s2-3-1"><title>Sentiment by Valence Aware Dictionary and Sentiment Reasoner</title><p>The sentiment polarity was analyzed using the VADER (Valence Aware Dictionary and Sentiment Reasoner) library [<xref ref-type="bibr" rid="ref25">25</xref>], a lexicon designed for sentiment analysis in social media contexts. It accounts for important elements including emojis, emoticons, slang words, and acronyms or initialisms with sentimental value (eg, &#x201C;lol&#x201D;) in determining the compound score. The compound score is calculated by adding the sentiment scores of each word, and it is set to be between &#x2212;1 (negative) and +1 (positive) after being adjusted according to a set of rules. The threshold in our study was a compound score of &#x2265;0.05 for positive sentiment posts, &#x003C;0.05 and &#x003E;&#x2212;0.05 for neutral sentiment posts, and &#x2264;&#x2212;0.05 for posts expressing negative sentiment (Figure S3 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p></sec><sec id="s2-3-2"><title>Emotions by NRCLex</title><p>The NRCLex tool [<xref ref-type="bibr" rid="ref26">26</xref>] is a Python library that allows for the analysis of the emotional content of text using the National Research Council Canada emotion lexicon, which contains approximately 27,000 words. It provides a simple interface to extract various emotions and sentiments from text. The emotional effects measured include fear, anger, anticipation, trust, surprise, sadness, disgust, and joy. 
In our study, we used the &#x201C;raw_emotion_scores&#x201D; model of the NRCLex tool to count the frequency of words associated with certain emotions in a text. For example, a post containing 3 words associated with anticipation and 1 word associated with trust would have an emotion score of +3 for anticipation and +1 for trust. The primary emotion for each post was determined by identifying the emotion with the highest score, giving equal consideration to multiple emotions if they had equal scores. If no emotion was identified for a post, we used the label &#x201C;no emotion&#x201D; and excluded such posts from any emotion analysis. These analyses were applied only to posts with positive or negative sentiment, excluding posts with neutral sentiment, as they are not informative for emotion analysis (Figure S3 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p></sec><sec id="s2-3-3"><title>Subjectivity by TextBlob</title><p>TextBlob library [<xref ref-type="bibr" rid="ref27">27</xref>] was used to classify X posts based on the subjectivity score (ranging from 0 to 1), which detects the degree of personal opinion expressed in the text. Words that are more opinion-based (eg, scary and amazing) have a higher subjectivity score, whereas words that are fact-based (eg, data and communication) have a lower subjectivity score. The threshold used in our study is a subjectivity score of 0.5. 
If the score is greater than or equal to 0.5, the post is labeled as &#x201C;subjective,&#x201D; otherwise the post is labeled as &#x201C;objective.&#x201D; For nontemporal analyses, we performed sensitivity analyses to verify the results by excluding duplicated texts (ie, similar text posted by different users) and conducted the sentiment and emotion analyses on 1000 randomly selected posts (Figures S3 and S4 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p></sec></sec><sec id="s2-4"><title>Trends in Public Perception</title><p>Changes in sentiments were tracked over time. The Mann-Kendall trend test was applied to identify significant trends in monthly incidence. Temporally weighted analyses were used to identify any changes in users&#x2019; discussions and comments regarding BCI technologies over time.</p></sec><sec id="s2-5"><title>Topic Modeling</title><p>To better understand the context of topics within the complex discussion regarding BCI in X, we used the BERTopic tool [<xref ref-type="bibr" rid="ref28">28</xref>] to conduct a topic modeling analysis. BERTopic is a topic modeling technique that uses deep learning and NLP to better understand the context and semantic relationships within text data generating clusters of similar texts, interpreted as topics. It works by embedding text into numerical representations, which are converted into a high-dimensional vector using a pretrained transformer model. Text or sentences related to each other will be close to each other in this vector, while unrelated ones will be away from each other. After dimensionality reduction of the vector, the lower-dimensional embeddings are clustered to group similar documents together. Each cluster represents a potential topic. Finally, each topic is represented by its most representative documents or by extracting keywords that best describe the cluster. 
Results from this topic modeling analysis include the frequency of certain words within a topic and the probability that given words represent certain topics. We preprocessed the data by removing duplicate posts, then used the &#x201C;all-MiniLM-L6-v2&#x201D; embedding model and the SentenceTransformer model to analyze topics in the posts discussing BCI. The top 8 topics discussed in our dataset are visualized in topic word score bar charts, including specific topics for posts expressing positive or negative sentiments alone (Figure S4 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p></sec><sec id="s2-6"><title>Statistical Analysis</title><p>All analyses were conducted using Python (version 3.8; Python Software Foundation) and R (version 4.4.0; R Foundation for Statistical Computing). The <italic>P</italic> value threshold for statistical significance in this study is set at .05.</p></sec><sec id="s2-7"><title>Ethical Considerations</title><p>All extracted data used and presented in this study were archival, cross-sectional, and observational, obtained from publicly accessible sources without any interaction with social media users and with their usernames omitted. As such, institutional review board approval was not required.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Post Characteristics</title><p>Our study analyzed a total of 65,340 posts, created by 38,962 distinct users (<xref ref-type="table" rid="table1">Table 1</xref>). These users had a median follower count of 662 (IQR 173-2332). The median number of total posts per user was 8976.5 (IQR 2294-33,677). Most of the posts came from a diverse user base, as only 5.21% (3405/65,340) originated from the top 50 most active contributors. The content of these posts varied, as 60,079 (91.94%) included links, while 5838 (8.93%) featured media. 
A total of 16,617 (25.43%) posts contained tags, and 18,623 (28.5%) posts contained hashtags. Engagement metrics showed that 17,141 (26.23%) posts received at least 1 like, 5104 (7.81%) posts had at least 1 reply, 12,210 (18.68%) posts were reposted at least once, and 2688 (4.11%) posts contained quote posts. Post characteristics from 1000 randomly selected posts and a text duplicate&#x2013;free sample are showcased in <xref ref-type="table" rid="table1">Table 1</xref>.</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Overview of post characteristics with complete dataset and validation subsets in BCI discussions on X.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="top">Post characteristics</td><td align="left" valign="top">Total posts (n=65,340)<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup>,<break/>n (%)</td><td align="left" valign="top">Duplicate texts removed (n=39,990)<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup>,<break/>n (%)</td><td align="left" valign="top">Random 1000 posts<sup><xref ref-type="table-fn" rid="table1fn3">c</xref></sup>,<break/>n (%)</td></tr></thead><tbody><tr><td align="left" valign="top">At least 1 repost</td><td align="left" valign="top">12,210 (18.68)</td><td align="left" valign="top">8710 (21.78)</td><td align="left" valign="top">170 (17)</td></tr><tr><td align="left" valign="top">At least 1 reply</td><td align="left" valign="top">5104 (7.81)</td><td align="left" valign="top">4203 (10.51)</td><td align="left" valign="top">78 (7.8)</td></tr><tr><td align="left" valign="top">At least 1 like</td><td align="left" valign="top">17,141 (26.23)</td><td align="left" valign="top">12,640 (31.60)</td><td align="left" valign="top">255 (25.5)</td></tr><tr><td align="left" valign="top">At least 1 quote</td><td align="left" valign="top">2688 (4.11)</td><td align="left" valign="top">1849 (4.62)</td><td align="left" valign="top">32 
(3.2)</td></tr><tr><td align="left" valign="top">Contains a link</td><td align="left" valign="top">60,079 (91.94)</td><td align="left" valign="top">35,020 (87.57)</td><td align="left" valign="top">913 (91.3)</td></tr><tr><td align="left" valign="top">Contains a media</td><td align="left" valign="top">5838 (8.93)</td><td align="left" valign="top">4218 (10.45)</td><td align="left" valign="top">80 (8)</td></tr><tr><td align="left" valign="top">Contains a tagging</td><td align="left" valign="top">16,617 (25.43)</td><td align="left" valign="top">12,122 (30.31)</td><td align="left" valign="top">254 (25.4)</td></tr><tr><td align="left" valign="top">Contains a hashtag</td><td align="left" valign="top">18,623 (28.50)</td><td align="left" valign="top">13,336 (33.34)</td><td align="left" valign="top">281 (28.1)</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>The number of total unique users: 38,962.</p></fn><fn id="table1fn2"><p><sup>b</sup>The number of unique users of the duplicate-free subset: 25,008.</p></fn><fn id="table1fn3"><p><sup>c</sup>The number of unique users of the random 1000 posts subset: 959.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-2"><title>User Biography</title><p>Biographic analysis (unique users=34,565) showed that &#x201C;broadcasting&#x201D; (10,171/34,565, 29.42%) was the highest group, followed by &#x201C;entrepreneurship&#x201D; (9359/34,565, 27.02%), &#x201C;scientific&#x201D; (9200/34,565, 26.61%), &#x201C;clinical&#x201D; (4066/34,565, 11.76%), and &#x201C;other&#x201D; (1769/34,565, 5.11%). 
The contribution to the BCI discussion (total posts=58,030) was the highest from users in the &#x201C;broadcasting&#x201D; group with 17,803 (30.67%) posts, followed by &#x201C;scientific&#x201D; with 16,005 (27.58%) posts, &#x201C;entrepreneurship&#x201D; with 14,008 (24.13%) posts, &#x201C;clinical&#x201D; with 7380 (12.71%) posts, and &#x201C;other&#x201D; with 2834 (4.88%) posts (<xref ref-type="table" rid="table2">Table 2</xref>). The &#x201C;scientific&#x201D; group had the highest engagement metrics among all the biography groups, while the &#x201C;entrepreneurship&#x201D; group had the lowest engagement metrics in our dataset (<xref ref-type="table" rid="table3">Table 3</xref>).</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Distribution of unique users and total posts by biographic group in brain-computer interface discussions on X.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Biographic group</td><td align="left" valign="bottom">Unique users<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup> (n=34,565), n (%)</td><td align="left" valign="bottom">Total posts<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup> (n=58,030), n (%)</td></tr></thead><tbody><tr><td align="left" valign="top">Broadcasting</td><td align="left" valign="top">10,171 (29.42)</td><td align="left" valign="top">17,803 (30.67)</td></tr><tr><td align="left" valign="top">Entrepreneurship</td><td align="left" valign="top">9359 (27.02)</td><td align="left" valign="top">14,008 (24.13)</td></tr><tr><td align="left" valign="top">Scientific</td><td align="left" valign="top">9200 (26.61)</td><td align="left" valign="top">16,005 (27.58)</td></tr><tr><td align="left" valign="top">Clinical</td><td align="left" valign="top">4066 (11.76)</td><td align="left" valign="top">7380 (12.71)</td></tr><tr><td align="left" valign="top">Other</td><td align="left" valign="top">1769 (5.11)</td><td align="left" 
valign="top">2834 (4.88)</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>Users with empty user biography were excluded from the analysis.</p></fn></table-wrap-foot></table-wrap><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Post characteristics and engagement metrics across biographic groups in brain-computer interface discussions on X.</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Post characteristics</td><td align="left" valign="bottom">Broadcasting<sup><xref ref-type="table-fn" rid="table3fn1">a</xref></sup>, n (%)</td><td align="left" valign="bottom">Entrepreneurship<sup><xref ref-type="table-fn" rid="table3fn2">b</xref></sup>, n (%)</td><td align="left" valign="bottom">Scientific<sup><xref ref-type="table-fn" rid="table3fn3">c</xref></sup>, n (%)</td><td align="left" valign="bottom">Clinical<sup><xref ref-type="table-fn" rid="table3fn4">d</xref></sup>, n (%)</td></tr></thead><tbody><tr><td align="left" valign="top">At least 1 repost</td><td align="left" valign="top">3549 (19.93)</td><td align="left" valign="top">2456 (17.53)</td><td align="left" valign="top">3675 (22.96)</td><td align="left" valign="top">1601 (21.69)</td></tr><tr><td align="left" valign="top">At least 1 reply</td><td align="left" valign="top">1596 (8.96)</td><td align="left" valign="top">986 (7.03)</td><td align="left" valign="top">1511 (9.44)</td><td align="left" valign="top">545 (7.38)</td></tr><tr><td align="left" valign="top">At least 1 like</td><td align="left" valign="top">4822 (27.08)</td><td align="left" valign="top">3571 (25.49)</td><td align="left" valign="top">5119 (31.98)</td><td align="left" valign="top">2266 (30.70)</td></tr><tr><td align="left" valign="top">At least 1 quote</td><td align="left" valign="top">934 (5.24)</td><td align="left" valign="top">482 (3.44)</td><td align="left" valign="top">831 (5.19)</td><td align="left" valign="top">286 (3.87)</td></tr><tr><td 
align="left" valign="top">Contains a link</td><td align="left" valign="top">16,307 (91.59)</td><td align="left" valign="top">13,058 (93.21)</td><td align="left" valign="top">14,568 (91.02)</td><td align="left" valign="top">6913 (93.67)</td></tr><tr><td align="left" valign="top">Contains a media</td><td align="left" valign="top">1628 (9.14)</td><td align="left" valign="top">1264 (9.02)</td><td align="left" valign="top">1600 (9.99)</td><td align="left" valign="top">730 (9.89)</td></tr><tr><td align="left" valign="top">Contains a tagging</td><td align="left" valign="top">4128 (23.18)</td><td align="left" valign="top">3757 (26.82)</td><td align="left" valign="top">4766 (29.77)</td><td align="left" valign="top">1834 (24.85)</td></tr><tr><td align="left" valign="top">Contains a hashtag</td><td align="left" valign="top">4990 (28.02)</td><td align="left" valign="top">4655 (33.23)</td><td align="left" valign="top">4687 (29.28)</td><td align="left" valign="top">2289 (31.01)</td></tr></tbody></table><table-wrap-foot><fn id="table3fn1"><p><sup>a</sup>17,803 (30.67%) posts.</p></fn><fn id="table3fn2"><p><sup>b</sup>14,008 (24.13%) posts.</p></fn><fn id="table3fn3"><p><sup>c</sup>16,005 (27.58%) posts.</p></fn><fn id="table3fn4"><p><sup>d</sup>7380 (12.71%) posts.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-3"><title>Engagement Trends</title><p>In 2017, there was a substantial increase in the number of posts, which accounted for approximately 24.52% of the entire dataset (<xref ref-type="fig" rid="figure1">Figure 1</xref>). March had the highest number of posts (n=3686), followed by April (n=3094) (<xref ref-type="fig" rid="figure2">Figure 2</xref>). This rise in March&#x2019;s posts was primarily due to Elon Musk&#x2019;s announcement of &#x201C;Neuralink,&#x201D; his BCI company [<xref ref-type="bibr" rid="ref29">29</xref>]. The words &#x201C;Neuralink,&#x201D; &#x201C;Musk,&#x201D; and &#x201C;Elon&#x201D; were collectively mentioned 5831 times in March. 
The number of posts in April can be attributed to the announcement of Facebook&#x2019;s BCI projects, with the term &#x201C;Facebook&#x201D; receiving 1944 mentions [<xref ref-type="bibr" rid="ref30">30</xref>]. A bar chart containing the 25 most frequently mentioned terms, excluding BCI-related terms is included in Figure S5 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>.</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>The number of posts shared on X discussing brain-computer interface (BCI) annually from 2010 to 2021. From 2010 to 2014, the number of posts remained relatively steady, fluctuating but less than 4000 per year. A gradual increase began in 2015, peaking sharply in 2017 at around 16,000 posts, marking the highest level of activity in the timeline. This spike coincides with the public announcements of Elon Musk&#x2019;s BCI company, Neuralink, and Facebook&#x2019;s BCI project. Following this peak, there was a substantial drop in 2018, with post numbers returning to earlier levels. From 2019 to 2021, the number of posts showed a fluctuating yet gradual increase.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e60859_fig01.png"/></fig><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>The number of posts shared on X discussing brain-computer interface (BCI) per month of the year 2017. The number of posts increases from January, peaking in March and April at over 3000 posts each month. The peak in March (a) coincides with Neuralink&#x2019;s public announcement, while the peak in April (b) aligns with Facebook&#x2019;s announcement of its BCI project. Following these peaks, there is a sharp decline in May, with the number of posts dropping to the lowest point in June. 
A slight increase is observed in July, but the number of posts remains lower than the earlier peaks, stabilizing at lower levels from September to December.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e60859_fig02.png"/></fig></sec><sec id="s3-4"><title>Sentiment Quantification and Trends in Public Perception</title><p>Most of the posts tended to express neutral sentiments (38,804/65,340, 59.38%). Approximately one-third of the posts conveyed positive sentiments (21,404/65,340, 32.75%), while a smaller portion expressed negative sentiments (5132/65,340, 7.85%). Deletion of duplicate text resulted in a notable polarization of sentiments, potentially due to the exclusion of posts containing only titles of news articles, which tend to be more neutral in tone (<xref ref-type="table" rid="table4">Table 4</xref>). A considerable increase in positive posts was observed in February 2017, coinciding with the publication of Stanford-led BCI studies, and in July 2017, following the Defense Advanced Research Projects Agency (DARPA) award for BCI [<xref ref-type="bibr" rid="ref31">31</xref>-<xref ref-type="bibr" rid="ref33">33</xref>]. The number of posts expressing negative sentiment remained low, with an average of 428 (8.19%) posts per year throughout the study period (<xref ref-type="fig" rid="figure3">Figure 3</xref>; Figure S6 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). The average polarity score showed an overall positive trend throughout the study (Mann-Kendall Statistic=0.266; &#x03C4;=0.266; <italic>P</italic>&#x003C;.001; Figure S7 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). 
The mean sentiment score increased substantially upon the announcement of the collaboration between Stanford University and BrainGate in November 2011, in April 2016 following the publication of the study on the use of BCI in restoring functional movement in a patient with quadriplegia, and again in July 2017 with the DARPA-led award [<xref ref-type="bibr" rid="ref33">33</xref>-<xref ref-type="bibr" rid="ref36">36</xref>]. The spike in negative sentiment posts in March 2017 was mostly due to discussion related to Musk&#x2019;s involvement in BCI, with an excited yet conservative tone. The word &#x201C;help&#x201D; (n=1489 mentions) was common in posts expressing positive sentiment, whereas in posts with negative sentiment, the term &#x201C;injury&#x201D; (n=325 mentions) was the most mentioned word. The frequent mentions of Elon Musk and artificial intelligence (AI) in the positive and negative sentiments are suggestive of mixed and polarized opinions about him and his BCI company &#x201C;Neuralink&#x201D; (Figure S8 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Most of the posts were objective (50,847/65,340, 77.81%), while fewer were subjective (14,393/65,340, 22.02%; <xref ref-type="table" rid="table4">Table 4</xref>).</p><p>The most prevalent emotion scores observed were &#x201C;anticipation,&#x201D; &#x201C;trust,&#x201D; and &#x201C;fear,&#x201D; accounting for 20.52% (10,802/52,618), 17.56% (9244/52,618), and 13.95% (7344/52,618) of the expressions, respectively. However, &#x201C;surprise,&#x201D; &#x201C;joy,&#x201D; and &#x201C;disgust&#x201D; were less frequently expressed, constituting 8.77% (4619/52,618), 12.69% (6681/52,618), and 2.79% (1470/52,618), respectively (<xref ref-type="table" rid="table5">Table 5</xref>). The emotion scores of the text duplicate&#x2013;free dataset and random 1000 posts are in <xref ref-type="table" rid="table5">Table 5</xref>. 
Focusing on the 3 most prominent emotions, &#x201C;anticipation&#x201D; peaked at 47.49% in 2010, coinciding with the sharing of news regarding the potential gamification of BCI technologies [<xref ref-type="bibr" rid="ref37">37</xref>]. In July 2017, there was a substantial increase in posts expressing anticipation as their primary emotion, reaching as high as 48.07% of posts shared; discussions were mostly related to DARPA&#x2019;s award for BCI, and the use of BCI in the production of music using the &#x201C;encephalophone&#x201D; [<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref39">39</xref>]. &#x201C;Trust&#x201D; reached 23.43% of total posts in 2021 and was the lowest in 2016 with 3.57% of total posts. &#x201C;Fear&#x201D; displayed a notable increase in posts in 2016, with 25.90%, and was the lowest in 2013 with 3.34% (<xref ref-type="fig" rid="figure4">Figure 4</xref>; Figure S9 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>).</p><table-wrap id="t4" position="float"><label>Table 4.</label><caption><p>Overview of sentiment and subjectivity in brain-computer interface discussions on X across the complete dataset and validation subsets.</p></caption><table id="table4" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">Total posts (n=65,340), n (%)</td><td align="left" valign="bottom">Duplicate texts removed (n=39,990), n (%)</td><td align="left" valign="bottom">Random 1000 posts, n (%)</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="4">Sentiment</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Positive sentiment</td><td align="left" valign="top">21,404 (32.75)</td><td align="left" valign="top">15,390 (38.48)</td><td align="left" valign="top">334 (33.4)</td></tr><tr><td align="left" 
valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Neutral sentiment</td><td align="left" valign="top">38,804 (59.38)</td><td align="left" valign="top">20,924 (52.32)</td><td align="left" valign="top">591 (59.1)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Negative sentiment</td><td align="left" valign="top">5132 (7.85)</td><td align="left" valign="top">3676 (9.1)</td><td align="left" valign="top">75 (7.5)</td></tr><tr><td align="left" valign="top" colspan="4">Subjectivity</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Subjective</td><td align="left" valign="top">14,393 (22.02)</td><td align="left" valign="top">10,571 (26.4)</td><td align="left" valign="top">230 (23)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Objective</td><td align="left" valign="top">50,847 (77.81)</td><td align="left" valign="top">29,419 (73.5)</td><td align="left" valign="top">770 (77)</td></tr></tbody></table></table-wrap><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>The annual number of brain-computer interface&#x2013;related posts on X from 2010 to 2021, categorized by sentiment. Positive sentiment posts (green line) increased significantly from 2016, peaking in 2017 at over 3500 posts, followed by a decline in 2018 and subsequent growth through 2021. Negative sentiment posts (red line) remained relatively steady throughout the period, showing minor fluctuations but consistently lower numbers compared to positive sentiment posts. 
Overall, positive sentiment posts were more prevalent than negative ones across all years.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e60859_fig03.png"/></fig><table-wrap id="t5" position="float"><label>Table 5.</label><caption><p>Distribution of positive and negative emotions in brain-computer interface discussions on X across the complete dataset and validation subsets.</p></caption><table id="table5" frame="hsides" rules="groups"><thead><tr><td align="left" valign="top">Emotion</td><td align="left" valign="top">Total posts, score<sup><xref ref-type="table-fn" rid="table5fn1">a</xref></sup><sup>,</sup><sup><xref ref-type="table-fn" rid="table5fn2">b</xref></sup> (%)</td><td align="left" valign="top">Duplicated text removed<sup><xref ref-type="table-fn" rid="table5fn3">c</xref></sup><sup>,</sup><sup><xref ref-type="table-fn" rid="table5fn2">b</xref></sup>, score (%)</td><td align="left" valign="top">Random 1000 posts,<sup><xref ref-type="table-fn" rid="table5fn2">b</xref></sup><sup>,</sup><sup><xref ref-type="table-fn" rid="table5fn4">d</xref></sup> score (%)</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="4"><bold>Positive emotions</bold></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Anticipation</td><td align="left" valign="top">10,802 (20.52)</td><td align="left" valign="top">7855 (20.52)</td><td align="left" valign="top">454 (22.07)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Trust</td><td align="left" valign="top">9244 (17.56)</td><td align="left" valign="top">7536 (19.69)</td><td align="left" valign="top">386 (18.76)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Joy</td><td align="left" valign="top">6681 (12.69)</td><td 
align="left" valign="top">5150 (13.45)</td><td align="left" valign="top">265 (12.88)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Surprise</td><td align="left" valign="top">4619 (8.77)</td><td align="left" valign="top">3261 (8.52)</td><td align="left" valign="top">174 (8.45)</td></tr><tr><td align="left" valign="top" colspan="4"><bold>Negative emotions</bold></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Fear</td><td align="left" valign="top">7344 (13.95)</td><td align="left" valign="top">5083 (13.28)</td><td align="left" valign="top">274 (13.32)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Sadness</td><td align="left" valign="top">6623 (12.58)</td><td align="left" valign="top">4402 (11.50)</td><td align="left" valign="top">244 (11.86)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Anger</td><td align="left" valign="top">5745 (10.91)</td><td align="left" valign="top">3646 (9.52)</td><td align="left" valign="top">185 (8.99)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Disgust</td><td align="left" valign="top">1470 (2.79)</td><td align="left" valign="top">1317 (3.44)</td><td align="left" valign="top">74 (3.59)</td></tr></tbody></table><table-wrap-foot><fn id="table5fn1"><p><sup>a</sup>Total score, excluding posts expressing no emotion and neutral sentiment: 52,618</p></fn><fn id="table5fn2"><p><sup>b</sup>Emotions conveyed equally with similar scores are considered separately.</p></fn><fn id="table5fn3"><p><sup>c</sup>Total score, excluding posts expressing no emotion and neutral sentiment: 38,268</p></fn><fn id="table5fn4"><p><sup>d</sup>Total score, excluding 
posts expressing no emotion and neutral sentiment: 2057</p></fn></table-wrap-foot></table-wrap><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>The annual trend of primary emotions&#x2014;trust, anticipation, and fear&#x2014;expressed in posts discussing brain-computer interface on X from 2010 to 2021. It indicates fluctuations in the presence of each emotion over time. Anticipation (light blue line) shows varied peaks and troughs throughout the timeline, with prominent peaks in 2010, 2013, and 2017. Trust (dark blue line) displays a more consistent pattern with smaller fluctuations and an increase toward the end of the period. Fear (purple line) remains relatively stable over the years, with slight peaks in 2012, 2015, and 2016, but generally shows lower percentages compared to anticipation and trust.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="formative_v9i1e60859_fig04.png"/></fig></sec><sec id="s3-5"><title>Temporally Weighted Sentiment Analyses</title><p>A temporally weighted analysis was conducted to explore the change in public discussion before, during, and after the spike in 2017 (<xref ref-type="table" rid="table6">Table 6</xref>). The increase in posts expressing a neutral sentiment (11,394/16,023, 71.11%), as well as the rise in posts expressing anticipation (1417/2512, 56.41%) and trust (n=1117/2512, 44.47%) in 2017, demonstrates a public that is excited yet cautious toward this emerging technology. However, after 2017, the public discussion polarized, as positive and negative views increased substantially, from 24.41% (3912/16,023) to 40.86% (10,391/25,430) and from 4.47% (717/16,023) to 9.41% (2393/25,430), respectively. Additionally, there was a notable increase in subjective posts, from 18.95% (3036/16,023) to 26.76% (6804/25,430), after 2017. 
Fear was substantially expressed before 2017 with 40.97% (1936/4725), before dropping to below 30% during and after 2017, possibly indicating public excitement and trust following the big announcements in the BCI industry in 2017. However, the increase from 23.01% (578/2512) to 28.30% (2237/7904) in fear might indicate unaddressed ethical questions.</p><table-wrap id="t6" position="float"><label>Table 6.</label><caption><p>Sentiment, subjectivity, and primary emotions over time in brain-computer interface discussions on X.</p></caption><table id="table6" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">Before 2017 (n=23,887), n (%)</td><td align="left" valign="bottom">During 2017 (n=16,023), n (%)</td><td align="left" valign="bottom">After 2017 (n=25,430), n (%)</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="4"><bold>Sentiment</bold></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Positive</td><td align="left" valign="top">7101 (29.73)</td><td align="left" valign="top">3912 (24.41)</td><td align="left" valign="top">10,391 (40.86)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Neutral</td><td align="left" valign="top">14,764 (61.81)</td><td align="left" valign="top">11,394 (71.11)</td><td align="left" valign="top">12,646 (49.73)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Negative</td><td align="left" valign="top">2022 (8.46)</td><td align="left" valign="top">717 (4.47)</td><td align="left" valign="top">2393 (9.41)</td></tr><tr><td align="left" valign="top" colspan="4"><bold>Subjectivity</bold>&#x2003;</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Objective</td><td 
align="left" valign="top">19,234 (80.52)</td><td align="left" valign="top">12,987 (81.05)</td><td align="left" valign="top">18,626 (73.24)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Subjective</td><td align="left" valign="top">4653 (19.48)</td><td align="left" valign="top">3036 (18.95)</td><td align="left" valign="top">6804 (26.76)</td></tr><tr><td align="left" valign="top" colspan="4"><bold>Emotions</bold><sup><xref ref-type="table-fn" rid="table6fn1">a</xref></sup><sup>,<xref ref-type="table-fn" rid="table6fn2">b</xref></sup></td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Anticipation</td><td align="left" valign="top">1977 (41.84)</td><td align="left" valign="top">1417 (56.41)</td><td align="left" valign="top">3490 (44.15)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Trust</td><td align="left" valign="top">913 (19.32)</td><td align="left" valign="top">1117 (44.47)</td><td align="left" valign="top">3506 (44.36)</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Fear</td><td align="left" valign="top">1936 (40.97)</td><td align="left" valign="top">578 (23.01)</td><td align="left" valign="top">2237 (28.30)</td></tr></tbody></table><table-wrap-foot><fn id="table6fn1"><p><sup>a</sup>Posts expressing no primary emotion were excluded from the analysis. 
Emotions conveyed equally with similar scores are considered separately; hence, percentages exceeding 100%.</p></fn><fn id="table6fn2"><p><sup>b</sup>Before 2017: 4725 posts, during 2017: 2512 posts, and after 2017: 7904 posts.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-6"><title>Topic Modeling</title><p>We conducted a topic modeling analysis to understand the semantic and dynamic discussion related to BCI technology. The main topics discussed in our dataset included ideas related to BCI changing the future, Musk&#x2019;s announcement of &#x201C;Neuralink,&#x201D; Facebook&#x2019;s involvement in BCI technology, and the gamification of BCI technologies (Figure S10 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Topics discussed in posts expressing positive sentiment included the advancement of BCI in research and health care and BCI advancements with virtual reality (VR) and augmented reality (AR; Figure S11 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Discussions in posts conveying negative sentiment were related to conspiracy theories of Musk or &#x201C;Neuralink,&#x201D; further demonstrating that the involvement of Musk in BCI, although highly influential, was also particularly controversial. Moreover, our analysis detected discussions related to the potential use of BCI for malicious reasons including mind control (Figure S12 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). Examples of posts related to different topics are included in Table S2 in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>.</p></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Findings</title><p>Our study highlights the complex and evolving public perception of BCI technology, as expressed in X discussions over the past decade. 
Sentiment analysis reveals that while a majority of the public holds neutral views, there is a notable undercurrent of anticipation, trust, and fear. The presence of these conflicting emotions suggests that while there is optimism about the potential of BCIs, concerns around ethical issues remain unresolved. Understanding these emotions is essential for the development and acceptance of BCI technology. Key topics that emerged from the discussions include Neuralink and Elon Musk&#x2014;subjects of both positive and negative conversations&#x2014;the practical applications of BCIs, and the potential for gamification of BCI technology. In addition to being key market drivers, addressing these factors will influence innovation and drive investments in the BCI space.</p></sec><sec id="s4-2"><title>Ethical Challenges</title><p>BCI is one of the most important technologies in neuroscience, sparking diverse opinions regarding its ethical implications [<xref ref-type="bibr" rid="ref40">40</xref>-<xref ref-type="bibr" rid="ref42">42</xref>]. Safety, justice, privacy, and security are the main concerns discussed in ethical literature [<xref ref-type="bibr" rid="ref43">43</xref>]. Some authors had concerns over the safety of implanting such invasive technologies, which could potentially cause serious health complications [<xref ref-type="bibr" rid="ref44">44</xref>], while others have criticized the biased narrative in the BCI literature, which tends to overlook the perspectives of individuals with disabilities [<xref ref-type="bibr" rid="ref45">45</xref>]. There was unease about the handling and storage of sensitive data obtained through BCI, with worries about its potential exploitation for malicious purposes [<xref ref-type="bibr" rid="ref46">46</xref>]. In our study, we detected discussions reflecting many of these concerns. 
We specifically highlighted concerns regarding Elon Musk and his company Neuralink, particularly related to safety and animal rights issues [<xref ref-type="bibr" rid="ref47">47</xref>], as well as the potential for BCI to be used in terrorism, or for brain hacking, which could lead to the leaking of sensitive personal information or even revealing emotions and thoughts. Open and inclusive discussions are essential to guiding the ethical development and use of BCI technology. Moreover, users from the scientific community will play a particularly critical role in this process. Our study found that posts from scientists generated the highest engagement when discussing BCI, underscoring their influence in shaping public understanding. By leveraging their trusted position, scientists can educate and foster informed discourse on these ethical issues to steer the responsible development and application of BCI technology.</p></sec><sec id="s4-3"><title>Elon Musk&#x2019;s Involvement</title><p>The significant rise in digital discussions about BCI on X, particularly in March 2017, aligns with Elon Musk&#x2019;s announcement of his BCI company, Neuralink. Elon Musk&#x2019;s substantial social media influence is evident, with almost 200 million followers on X<italic>.</italic> His posts are not only influential but often polarize public opinion [<xref ref-type="bibr" rid="ref48">48</xref>]. For instance, his comments on cryptocurrencies have demonstrably impacted their market values [<xref ref-type="bibr" rid="ref49">49</xref>]. Additionally, a sentiment analysis focused on electric vehicles revealed that Musk was a central figure in these discussions [<xref ref-type="bibr" rid="ref50">50</xref>]. 
These observations reinforce the significant role of influential figures like Elon Musk in steering the public dialogue and development of cutting-edge technologies.</p><p>Most posts conveyed a neutral sentiment, reflecting a combination of anticipation and excitement, yet mixed with fear and anxiety, resulting in a generally doubtful public perception. This perception could potentially be the result of unanswered ethical challenges. Furthermore, Musk&#x2019;s involvement in BCI technologies has significantly influenced public perceptions, often polarizing opinions. The 2023 sentiment analysis conducted on Reddit indicates that neutral and negative views toward Elon Musk outnumber positive sentiments in discussions about him [<xref ref-type="bibr" rid="ref48">48</xref>]. Additionally, in our study, a common theme in both the negative and positive discussions about BCI frequently centered around Musk or Neuralink. This mixed perception of Elon Musk could be another contributing factor to the sentiment results of our study. Ensuring transparent communication from influential figures like Musk might be key to positively shifting public sentiment toward BCI technologies.</p><p>Since acquiring Twitter, now rebranded as X, Elon Musk has become actively involved in geopolitical discourse, leading to conflicting opinions about his involvement in politics. This increased political engagement has further polarized public perception of him [<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref52">52</xref>], which might potentially extend to BCI technologies. 
To mitigate this migration of polarization, it is essential to diversify the voices in BCI communication by encouraging scientists and industry experts to take a more prominent role, thereby reducing the focus on any single individual.</p><p>Nonetheless, Musk&#x2019;s involvement in BCI technology has had a significant and positive impact, contributing to greater public awareness and advancements in the field. The surge in BCI-related discussions in 2017, following Musk&#x2019;s announcement of Neuralink, not only increased awareness of the technology and its potential applications but also led to a rise in posts expressing positive sentiments, mixed with anticipation and trust. This suggests a general optimism regarding the potential of BCI technology. Furthermore, this trust and enthusiasm is reflected in the recent growth of BCI-related publications as well as the rapid expansion of the BCI market [<xref ref-type="bibr" rid="ref53">53</xref>,<xref ref-type="bibr" rid="ref54">54</xref>]. In summary, even though Musk is considered a controversial figure by many, his influence in the field of BCI has been largely positive. However, it is important to continue making conscious efforts to ensure that this positive impact is sustained and that the technology develops responsibly and ethically.</p></sec><sec id="s4-4"><title>Bridging Public Sentiment and Practical Applications</title><p>We found that anticipation and trust were the most expressed emotions, likely stemming from the involvement of renowned universities in BCI research, including the collaborative BrainGate2 project [<xref ref-type="bibr" rid="ref34">34</xref>,<xref ref-type="bibr" rid="ref35">35</xref>]. The widespread media coverage and remarkable BCI outcomes in individuals with severe disabilities have also contributed to this trend [<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref36">36</xref>]. 
These technologies enable patients with disability to overcome their disabilities, thereby enhancing their quality of life. The public enthusiasm observed toward these milestones underscores a substantial societal demand for such innovations. This heightened positive sentiment indicates that the integration of BCIs into neuroprosthetics, AI-driven prosthetics, and exoskeleton devices not only represents a significant technological advancement but also fulfills a critical public need for effective solutions to address disabilities. Moreover, BCI technology used for nonrehabilitation activities, such as gaming and music production, led to an increase in anticipation due to its potential appeal and application to everyday consumers.</p><p>Fear was the third most prevalent emotion identified. This may be related to ongoing unanswered ethical concerns in BCI development, varying public opinions about public figures, and inherent fear toward novel technologies. Our capture of fear might be an early indicator of potential technophobia, as BCI becomes more commercially available. Difficult to understand and complex technologies are associated with more anxiety and fear of using them [<xref ref-type="bibr" rid="ref55">55</xref>]. In addition, the concept of &#x201C;mind control,&#x201D; frequently portrayed in popular culture and science fiction, could potentially heighten fear perceptions. Upcoming depictions and future works that continue to explore this theme may trigger more apprehensive emotions. Such emotion, if not addressed, could foster a conservative mindset, potentially slowing the adoption and hindering the application of innovative technologies such as BCI.</p></sec><sec id="s4-5"><title>The Gamification of BCI Technologies</title><p>One particularly exciting avenue for BCI technology is its integration into gaming. 
The potential of gamification in BCI technologies is vast, offering applications not only for medical rehabilitation [<xref ref-type="bibr" rid="ref56">56</xref>,<xref ref-type="bibr" rid="ref57">57</xref>] but also for entertainment experiences [<xref ref-type="bibr" rid="ref58">58</xref>-<xref ref-type="bibr" rid="ref60">60</xref>]. The concept of controlling and fully immersing oneself in a game to create experiences beyond physical limitations is highly appealing to the public, as evidenced by a notable increase in anticipation. Gabe Newell, cofounder and president of Valve, showcased Valve&#x2019;s interest in BCI for gaming, sparking significant enthusiasm [<xref ref-type="bibr" rid="ref61">61</xref>]. Integrating BCI technologies into gaming could play an important role in further advancing their development, much like the early adoption of VR and AR through gaming platforms such as the HTC Vive (by HTC and Valve Corporation), Meta Quest (by Meta Platforms), and PlayStation VR (by Sony Interactive Entertainment). These technologies, which began in gaming, have since expanded into broader applications in healthcare and education [<xref ref-type="bibr" rid="ref62">62</xref>]. Fueled by public enthusiasm, several companies have begun claiming that their products leverage BCI to enhance gaming experiences. However, it remains debatable whether we have truly reached that point [<xref ref-type="bibr" rid="ref11">11</xref>]. Numerous attempts have been made to use BCI for gaming, using both invasive and minimally invasive technologies. For example, Neuralink has successfully implemented a BCI in a human with quadriplegia, allowing the individual to play chess and other games by moving a cursor [<xref ref-type="bibr" rid="ref63">63</xref>]. &#x201C;I basically have an aimbot in my head,&#x201D; said Noland Arbaugh, the first patient to receive a brain-computer chip from Neuralink. 
An &#x201C;aimbot&#x201D; is a type of cheating software used in video games that automatically aims at opponents at a superhuman speed [<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref65">65</xref>]. Similarly, a popular Twitch streamer successfully beat the notoriously difficult game <italic>Elden Ring</italic> using a wearable electroencephalography-based BCI device, with thousands of live viewers [<xref ref-type="bibr" rid="ref66">66</xref>]. With a global video game market size estimated at US $217.06 billion in 2022, these advancements are likely to positively influence public perception of BCI and may attract the attention of major players in the gaming industry, potentially driving further research and development of BCI technologies [<xref ref-type="bibr" rid="ref67">67</xref>]. Gamification thus presents a significant opportunity for the future of BCIs, acting as a gateway for mainstream consumer adoption, similar to how VR or AR technologies gained traction.</p></sec><sec id="s4-6"><title>Public Acceptance and Trust: Key Drivers for the Growing BCI Market</title><p>The economic outlook for BCIs is promising. In 2023, the global market for BCIs was valued at US $2.0 billion and is projected to grow at a compound annual growth rate of 17.8% from 2024 to 2030. This growth is driven by increasing demand for neuroprosthetic devices and advancements in technology that enhance mobility and communication for patients who are paralyzed as well as expanding applications in gaming and military communication [<xref ref-type="bibr" rid="ref53">53</xref>]. However, public opinion and acceptance are essential to sustaining this market growth. Greater acceptance of the technology will drive further adoption and development, especially since young investors and future philanthropists are likely to support companies that align with their personal values and contribute to humanitarian goals [<xref ref-type="bibr" rid="ref68">68</xref>]. 
Our findings align with this broader market analysis. Most individuals in our study exhibit a neutral sentiment toward BCI technology, reflecting a phase of critical appraisal of this emerging innovation. Additionally, the prominence of emotions such as trust, anticipation, and fear underscores the imperative to educate, demystify, and familiarize the public with BCI technology. Doing so will be key to enhancing acceptance among the public. Additionally, the increase in the number of subjective posts in our analysis reflects increased awareness of BCI as a topic for discussion, and this growing awareness suggests that the public will begin to form their own ideas, opinions, and emotions toward BCI. This awareness might also indicate that people are shifting from passive observers to active participants in BCI discussions, contributing to the evolving conversation and potentially influencing its trajectory. Therefore, a concerted effort to inform and engage the public is crucial for fostering positive sentiment and ensuring the responsible development and integration of BCI technology. This, in turn, will support the continued expansion of the global BCI market.</p></sec><sec id="s4-7"><title>Limitations</title><p>The study has several limitations that may have impacted the accuracy of our results. First, the search excluded the term &#x201C;BCI&#x201D; to avoid including irrelevant posts, which may have inadvertently omitted relevant content. Additionally, the analysis was restricted to English-language posts and focused solely on the platform X, potentially overlooking important discussions on other platforms or in different languages. It was not possible to infer some of the important demographics of users, resulting in the loss of important contextual information. We used existing tools for NLP analyses that were not tailored and validated specifically for the study&#x2019;s topic, which may potentially lead to inaccurate results. 
Determining the geographical locations of users was not feasible, which would have provided valuable regional insights. Regarding the biography analysis, some overlap between categories is expected, as a single user may fall into multiple classifications. This method of short bioclassification is effective for only a subset of users, and further validation is required in future studies. Finally, the substantial number of posts containing links may indicate that the sample is not representative of general discussions but primarily consists of shared news articles. Future research should address these limitations to enhance the depth and accuracy of the understanding of the public perception toward BCI technologies.</p></sec><sec id="s4-8"><title>Conclusions</title><p>The findings from this NLP-assisted study offer a decade-long overview of public perception of BCI technology. Overall sentiment was mostly neutral, but the emotions most commonly linked to BCI&#x2014;anticipation, trust, and fear&#x2014;reflect a complex emotional response, suggesting a cautiously optimistic yet apprehensive attitude toward the advancement of BCI technology. Notably, the presence of fear underscores the importance of addressing ethical concerns and ensuring transparent communication within the BCI field. Resolving these issues is critical for reassuring the public and mitigating apprehensive attitudes. Additionally, the involvement of influential figures and leading institutions, along with reports of positive clinical outcomes, such as advancements in neuroprosthetics and rehabilitation, may foster more favorable public perceptions of BCI technology. The gamification of BCI, particularly its integration into gaming and entertainment, also offers a pathway to increased public engagement and adoption. However, the persistence of fear signals potential resistance that may impede progress if left unaddressed. 
Prioritizing ethical transparency, expanding public education, and incorporating a more diverse range of voices in the discussion could help drive broader acceptance and responsible use of BCI. Although this study uses advanced AI tools to offer valuable insights into public sentiment, certain limitations, such as potential biases and incomplete demographic data, should be acknowledged. Nonetheless, the findings serve as a valuable reference point for guiding future technological developments, informing policy making, and crafting effective communication strategies within the BCI sector.</p></sec></sec></body><back><ack><p>The authors acknowledge the use of ChatGPT-4o (OpenAI, 2024) for grammatical checks, language refinement, and assistance in writing the codes used for analysis during the preparation of this manuscript. The tool was not used to generate any original text or ideas.</p></ack><notes><sec><title>Data Availability</title><p>The datasets generated or analyzed during this study are available from the corresponding author on reasonable request.</p></sec></notes><fn-group><fn fn-type="con"><p>MA Almanna, LME, JJL, MA Alvi, and NMA contributed to the acquisition, analysis, and interpretation of data. Statistical analysis was performed by MA Almanna and MA Alvi. Administrative, technical, and material support was provided by MA Almanna. NMA supervised the study. All authors were involved in the conception and design of the study as well as in drafting the manuscript and critically revising it. All authors reviewed the final manuscript.</p></fn><fn fn-type="conflict"><p>LME was affiliated with Neuralink, a brain-computer interface company. However, this study was conducted independently, without involvement from Neuralink or its personnel. 
The other authors report no conflicts of interest.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">AI</term><def><p>artificial intelligence</p></def></def-item><def-item><term id="abb2">AR</term><def><p>augmented reality</p></def></def-item><def-item><term id="abb3">BCI</term><def><p>brain-computer interface</p></def></def-item><def-item><term id="abb4">DARPA</term><def><p>Defense Advanced Research Projects Agency</p></def></def-item><def-item><term id="abb5">NLP</term><def><p>natural language processing</p></def></def-item><def-item><term id="abb6">VADER</term><def><p>Valence Aware Dictionary and Sentiment Reasoner</p></def></def-item><def-item><term id="abb7">VR</term><def><p>virtual reality</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wolpaw</surname><given-names>JR</given-names> </name><name name-style="western"><surname>Birbaumer</surname><given-names>N</given-names> </name><name name-style="western"><surname>McFarland</surname><given-names>DJ</given-names> </name><name name-style="western"><surname>Pfurtscheller</surname><given-names>G</given-names> </name><name name-style="western"><surname>Vaughan</surname><given-names>TM</given-names> </name></person-group><article-title>Brain&#x2013;computer interfaces for communication and control</article-title><source>Clin Neurophysiol</source><year>2002</year><month>06</month><volume>113</volume><issue>6</issue><fpage>767</fpage><lpage>791</lpage><pub-id pub-id-type="doi">10.1016/S1388-2457(02)00057-3</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sun</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>X</given-names> 
</name><name name-style="western"><surname>Liu</surname><given-names>B</given-names> </name><etal/></person-group><article-title>Signal acquisition of brain-computer interfaces: a medical-engineering crossover perspective review</article-title><source>Fundam Res</source><year>2025</year><month>01</month><volume>5</volume><issue>1</issue><fpage>3</fpage><lpage>16</lpage><pub-id pub-id-type="doi">10.1016/j.fmre.2024.04.011</pub-id><pub-id pub-id-type="medline">40166113</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>He</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Ge</surname><given-names>P</given-names> </name><etal/></person-group><article-title>The brain nebula: minimally invasive brain-computer interface by endovascular neural recording and stimulation</article-title><source>J Neurointerv Surg</source><year>2024</year><month>11</month><day>22</day><volume>16</volume><issue>12</issue><fpage>1237</fpage><lpage>1243</lpage><pub-id pub-id-type="doi">10.1136/jnis-2023-021296</pub-id><pub-id pub-id-type="medline">38388478</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mane</surname><given-names>R</given-names> </name><name name-style="western"><surname>Chouhan</surname><given-names>T</given-names> </name><name name-style="western"><surname>Guan</surname><given-names>C</given-names> </name></person-group><article-title>BCI for stroke rehabilitation: motor and beyond</article-title><source>J Neural Eng</source><year>2020</year><month>08</month><day>17</day><volume>17</volume><issue>4</issue><fpage>041001</fpage><pub-id pub-id-type="doi">10.1088/1741-2552/aba162</pub-id><pub-id 
pub-id-type="medline">32613947</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Daly</surname><given-names>JJ</given-names> </name><name name-style="western"><surname>Cheng</surname><given-names>RC</given-names> </name><name name-style="western"><surname>Hrovat</surname><given-names>K</given-names> </name><name name-style="western"><surname>Litinas</surname><given-names>KH</given-names> </name><name name-style="western"><surname>Rogers</surname><given-names>JM</given-names> </name><name name-style="western"><surname>Dohring</surname><given-names>ME</given-names> </name></person-group><article-title>Development and testing of non-invasive BCI FES/robot system for use in motor re-learning after stroke</article-title><access-date>2025-06-10</access-date><conf-name>Proceedings of the 13th International Functional Electrical Stimulation Society</conf-name><conf-date>Sep 24-28, 2008</conf-date><conf-loc>Germany</conf-loc><comment><ext-link ext-link-type="uri" xlink:href="https://www.academia.edu/28915386/Development_and_Testing_of_Non_Invasive_BCI_FES_Robot_Sys_tem_For_Use_in_Motor_Re_Learning_After_Stroke?sm=b">https://www.academia.edu/28915386/Development_and_Testing_of_Non_Invasive_BCI_FES_Robot_Sys_tem_For_Use_in_Motor_Re_Learning_After_Stroke?sm=b</ext-link></comment></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Daly</surname><given-names>JJ</given-names> </name><name name-style="western"><surname>Wolpaw</surname><given-names>JR</given-names> </name></person-group><article-title>Brain-computer interfaces in neurological rehabilitation</article-title><source>Lancet Neurol</source><year>2008</year><month>11</month><volume>7</volume><issue>11</issue><fpage>1032</fpage><lpage>1043</lpage><pub-id 
pub-id-type="doi">10.1016/S1474-4422(08)70223-0</pub-id><pub-id pub-id-type="medline">18835541</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hochberg</surname><given-names>LR</given-names> </name><name name-style="western"><surname>Serruya</surname><given-names>MD</given-names> </name><name name-style="western"><surname>Friehs</surname><given-names>GM</given-names> </name><etal/></person-group><article-title>Neuronal ensemble control of prosthetic devices by a human with tetraplegia</article-title><source>Nature New Biol</source><year>2006</year><month>07</month><day>13</day><volume>442</volume><issue>7099</issue><fpage>164</fpage><lpage>171</lpage><pub-id pub-id-type="doi">10.1038/nature04970</pub-id><pub-id pub-id-type="medline">16838014</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Pfurtscheller</surname><given-names>G</given-names> </name><name name-style="western"><surname>Guger</surname><given-names>C</given-names> </name><name name-style="western"><surname>M&#x00FC;ller</surname><given-names>G</given-names> </name><name name-style="western"><surname>Krausz</surname><given-names>G</given-names> </name><name name-style="western"><surname>Neuper</surname><given-names>C</given-names> </name></person-group><article-title>Brain oscillations control hand orthosis in a tetraplegic</article-title><source>Neurosci Lett</source><year>2000</year><month>10</month><day>13</day><volume>292</volume><issue>3</issue><fpage>211</fpage><lpage>214</lpage><pub-id pub-id-type="doi">10.1016/s0304-3940(00)01471-3</pub-id><pub-id pub-id-type="medline">11018314</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Gal&#x00E1;n</surname><given-names>F</given-names> </name><name name-style="western"><surname>Nuttin</surname><given-names>M</given-names> </name><name name-style="western"><surname>Lew</surname><given-names>E</given-names> </name><etal/></person-group><article-title>A brain-actuated wheelchair: asynchronous and non-invasive brain-computer interfaces for continuous control of robots</article-title><source>Clin Neurophysiol</source><year>2008</year><month>09</month><volume>119</volume><issue>9</issue><fpage>2159</fpage><lpage>2169</lpage><pub-id pub-id-type="doi">10.1016/j.clinph.2008.06.001</pub-id><pub-id pub-id-type="medline">18621580</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tanaka</surname><given-names>K</given-names> </name><name name-style="western"><surname>Matsunaga</surname><given-names>K</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>HO</given-names> </name></person-group><article-title>Electroencephalogram-based control of an electric wheelchair</article-title><source>IEEE Trans Robot</source><year>2005</year><volume>21</volume><issue>4</issue><fpage>762</fpage><lpage>766</lpage><pub-id pub-id-type="doi">10.1109/TRO.2004.842350</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cattan</surname><given-names>G</given-names> </name></person-group><article-title>The use of brain&#x2013;computer interfaces in games is not ready for the general public</article-title><source>Front Comput Sci</source><year>2021</year><volume>3</volume><fpage>20</fpage><pub-id pub-id-type="doi">10.3389/fcomp.2021.628773</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Yeo</surname><given-names>PS</given-names> </name><name name-style="western"><surname>Nguyen</surname><given-names>TN</given-names> </name><name name-style="western"><surname>Ng</surname><given-names>MPE</given-names> </name><etal/></person-group><article-title>Evaluation of the implementation and effectiveness of community-based brain-computer interface cognitive group training in healthy community-dwelling older adults: randomized controlled implementation trial</article-title><source>JMIR Form Res</source><year>2021</year><month>04</month><day>27</day><volume>5</volume><issue>4</issue><fpage>e25462</fpage><pub-id pub-id-type="doi">10.2196/25462</pub-id><pub-id pub-id-type="medline">33904819</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Musk</surname><given-names>E</given-names> </name><collab>Neuralink</collab></person-group><article-title>An integrated brain-machine interface platform with thousands of channels</article-title><source>J Med Internet Res</source><year>2019</year><month>10</month><day>31</day><volume>21</volume><issue>10</issue><fpage>e16194</fpage><pub-id pub-id-type="doi">10.2196/16194</pub-id><pub-id pub-id-type="medline">31642810</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kennedy</surname><given-names>PR</given-names> </name><name name-style="western"><surname>Bakay</surname><given-names>RA</given-names> </name></person-group><article-title>Restoration of neural output from a paralyzed patient by a direct brain connection</article-title><source>Neuroreport</source><year>1998</year><month>06</month><day>1</day><volume>9</volume><issue>8</issue><fpage>1707</fpage><lpage>1711</lpage><pub-id 
pub-id-type="doi">10.1097/00001756-199806010-00007</pub-id><pub-id pub-id-type="medline">9665587</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Georgopoulos</surname><given-names>AP</given-names> </name><name name-style="western"><surname>Schwartz</surname><given-names>AB</given-names> </name><name name-style="western"><surname>Kettner</surname><given-names>RE</given-names> </name></person-group><article-title>Neuronal population coding of movement direction</article-title><source>Science</source><year>1986</year><month>09</month><day>26</day><volume>233</volume><issue>4771</issue><fpage>1416</fpage><lpage>1419</lpage><pub-id pub-id-type="doi">10.1126/science.3749885</pub-id><pub-id pub-id-type="medline">3749885</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Laubach</surname><given-names>M</given-names> </name><name name-style="western"><surname>Wessberg</surname><given-names>J</given-names> </name><name name-style="western"><surname>Nicolelis</surname><given-names>MA</given-names> </name></person-group><article-title>Cortical ensemble activity increasingly predicts behaviour outcomes during learning of a motor task</article-title><source>Nature New Biol</source><year>2000</year><month>06</month><day>1</day><volume>405</volume><issue>6786</issue><fpage>567</fpage><lpage>571</lpage><pub-id pub-id-type="doi">10.1038/35014604</pub-id><pub-id pub-id-type="medline">10850715</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Taylor</surname><given-names>DM</given-names> </name><name name-style="western"><surname>Tillery</surname><given-names>SIH</given-names> </name><name 
name-style="western"><surname>Schwartz</surname><given-names>AB</given-names> </name></person-group><article-title>Direct cortical control of 3D neuroprosthetic devices</article-title><source>Science</source><year>2002</year><month>06</month><day>7</day><volume>296</volume><issue>5574</issue><fpage>1829</fpage><lpage>1832</lpage><pub-id pub-id-type="doi">10.1126/science.1070291</pub-id><pub-id pub-id-type="medline">12052948</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Margalit</surname><given-names>E</given-names> </name><name name-style="western"><surname>Weiland</surname><given-names>JD</given-names> </name><name name-style="western"><surname>Clatterbuck</surname><given-names>RE</given-names> </name><etal/></person-group><article-title>Visual and electrical evoked response recorded from subdural electrodes implanted above the visual cortex in normal dogs under two methods of anesthesia</article-title><source>J Neurosci Methods</source><year>2003</year><month>03</month><day>15</day><volume>123</volume><issue>2</issue><fpage>129</fpage><lpage>137</lpage><pub-id pub-id-type="doi">10.1016/s0165-0270(02)00345-x</pub-id><pub-id pub-id-type="medline">12606062</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lyu</surname><given-names>JC</given-names> </name><name name-style="western"><surname>Han</surname><given-names>EL</given-names> </name><name name-style="western"><surname>Luli</surname><given-names>GK</given-names> </name></person-group><article-title>COVID-19 vaccine-related discussion on Twitter: topic modeling and sentiment analysis</article-title><source>J Med Internet Res</source><year>2021</year><month>06</month><day>29</day><volume>23</volume><issue>6</issue><fpage>e24435</fpage><pub-id 
pub-id-type="doi">10.2196/24435</pub-id><pub-id pub-id-type="medline">34115608</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Monselise</surname><given-names>M</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Ferreira</surname><given-names>G</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>R</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>CC</given-names> </name></person-group><article-title>Topics and sentiments of public concerns regarding COVID-19 vaccines: social media trend analysis</article-title><source>J Med Internet Res</source><year>2021</year><month>10</month><day>21</day><volume>23</volume><issue>10</issue><fpage>e30765</fpage><pub-id pub-id-type="doi">10.2196/30765</pub-id><pub-id pub-id-type="medline">34581682</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Swathi</surname><given-names>T</given-names> </name><name name-style="western"><surname>Kasiviswanath</surname><given-names>N</given-names> </name><name name-style="western"><surname>Rao</surname><given-names>AA</given-names> </name></person-group><article-title>Retracted article: an optimal deep learning-based LSTM for stock price prediction using Twitter sentiment analysis</article-title><source>Appl Intell</source><year>2022</year><month>09</month><volume>52</volume><issue>12</issue><fpage>13675</fpage><lpage>13688</lpage><pub-id pub-id-type="doi">10.1007/s10489-022-03175-2</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Ding</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Korolov</surname><given-names>R</given-names> </name><name name-style="western"><surname>(Al) Wallace</surname><given-names>W</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>X (Cara)</given-names> </name></person-group><article-title>How are sentiments on autonomous vehicles influenced? An analysis using Twitter feeds</article-title><source>Transportation Research Part C: Emerging Technologies</source><year>2021</year><month>10</month><volume>131</volume><fpage>103356</fpage><pub-id pub-id-type="doi">10.1016/j.trc.2021.103356</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kober</surname><given-names>SE</given-names> </name><name name-style="western"><surname>Buchrieser</surname><given-names>F</given-names> </name><name name-style="western"><surname>Wood</surname><given-names>G</given-names> </name></person-group><article-title>Neurofeedback on Twitter: evaluation of the scientific credibility and communication about the technique</article-title><source>Heliyon</source><year>2023</year><month>08</month><volume>9</volume><issue>8</issue><fpage>e18931</fpage><pub-id pub-id-type="doi">10.1016/j.heliyon.2023.e18931</pub-id><pub-id pub-id-type="medline">37600360</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Wiseman</surname><given-names>B</given-names> </name></person-group><article-title>Sentiment.ai: simple sentiment analysis using deep learning</article-title><source>R Project</source><year>2022</year><access-date>2025-05-31</access-date><comment><ext-link ext-link-type="uri" 
xlink:href="https://cran.r-project.org/web/packages/sentiment.ai/index.html">https://cran.r-project.org/web/packages/sentiment.ai/index.html</ext-link></comment></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hutto</surname><given-names>CJ</given-names> </name><name name-style="western"><surname>Gilbert</surname><given-names>EE</given-names> </name></person-group><article-title>VADER: a parsimonious rule-based model for sentiment analysis of social media text</article-title><source>ICWSM</source><volume>8</volume><issue>1</issue><fpage>216</fpage><lpage>225</lpage><pub-id pub-id-type="doi">10.1609/icwsm.v8i1.14550</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Mohammad</surname><given-names>SM</given-names> </name><name name-style="western"><surname>Turney</surname><given-names>PD</given-names> </name></person-group><article-title>Emotions evoked by common words and phrases: using mechanical turk to create an emotion lexicon</article-title><access-date>2025-06-10</access-date><conf-name>Proceedings of the NAACL HLT 2010 Workshop on Computational Approaches to Analysis and Generation of Emotion in Text</conf-name><conf-date>Jun 1-6, 2010</conf-date><conf-loc>Los Angeles, CA</conf-loc><fpage>26</fpage><lpage>34</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://aclanthology.org/W10-0204/">https://aclanthology.org/W10-0204/</ext-link></comment></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Loria</surname><given-names>S</given-names> </name></person-group><article-title>TextBlob: simplified text 
processing</article-title><source>TextBlob</source><access-date>2025-05-31</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://textblob.readthedocs.io/">https://textblob.readthedocs.io/</ext-link></comment></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Grootendorst</surname><given-names>M</given-names> </name></person-group><article-title>BERTopic: neural topic modeling with a class-based TF-IDF procedure</article-title><source>arXiv</source><comment>Preprint posted online on  Mar 11, 2022</comment><pub-id pub-id-type="doi">10.48550/arXiv.2203.05794</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Winkler</surname><given-names>R</given-names> </name></person-group><article-title>Elon musk launches Neuralink to connect brains with computers</article-title><source>The Wall Street Journal</source><year>2017</year><month>03</month><day>27</day><access-date>2025-05-18</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.wsj.com/articles/elon-musk-launches-neuralink-to-connect-brains-with-computers-1490642652">https://www.wsj.com/articles/elon-musk-launches-neuralink-to-connect-brains-with-computers-1490642652</ext-link></comment></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Strickland</surname><given-names>E</given-names> </name></person-group><article-title>Facebook announces &#x201C;typing-by-brain&#x201D; project</article-title><source>IEEE Spectrum</source><year>2017</year><month>04</month><day>20</day><comment><ext-link ext-link-type="uri" 
xlink:href="https://spectrum.ieee.org/facebook-announces-typing-by-brain-project">https://spectrum.ieee.org/facebook-announces-typing-by-brain-project</ext-link></comment></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Pandarinath</surname><given-names>C</given-names> </name><name name-style="western"><surname>Nuyujukian</surname><given-names>P</given-names> </name><name name-style="western"><surname>Blabe</surname><given-names>CH</given-names> </name><etal/></person-group><article-title>High performance communication by people with paralysis using an intracortical brain-computer interface</article-title><source>Elife</source><year>2017</year><month>02</month><day>21</day><volume>6</volume><fpage>e18554</fpage><pub-id pub-id-type="doi">10.7554/eLife.18554</pub-id><pub-id pub-id-type="medline">28220753</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Goldman</surname><given-names>B</given-names> </name></person-group><article-title>Brain-computer interface advance allows fast, accurate typing by people with paralysis</article-title><source>Wu Tsai Neurosciences Institute, Stanford University</source><year>2017</year><access-date>2025-05-31</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://neuroscience.stanford.edu/news/brain-computer-interface-advance-allows-fast-accurate-typing-people-paralysis">https://neuroscience.stanford.edu/news/brain-computer-interface-advance-allows-fast-accurate-typing-people-paralysis</ext-link></comment></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Sulleyman</surname><given-names>A</given-names> </name></person-group><article-title>DARPA to plug computers into 
brains to let machines talk directly to people</article-title><source>The Independent</source><year>2017</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.independent.co.uk/tech/darpa-computer-plug-in-brains-lost-vision-hearing-speech-restore-us-military-r-n-d-research-neural-engineering-system-design-a7835131.html">https://www.independent.co.uk/tech/darpa-computer-plug-in-brains-lost-vision-hearing-speech-restore-us-military-r-n-d-research-neural-engineering-system-design-a7835131.html</ext-link></comment></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="web"><article-title>BrainGate2: feasibility study of an intracortical neural interface system for persons with tetraplegia (NCT00912041)</article-title><source>ClinicalTrials.gov</source><year>2023</year><access-date>2023-12-30</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://clinicaltrials.gov/ct2/show/NCT00912041">https://clinicaltrials.gov/ct2/show/NCT00912041</ext-link></comment></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Henderson</surname><given-names>J</given-names> </name></person-group><article-title>Stanford joins BrainGate team developing brain-computer interface to aid people with paralysis</article-title><source>Stanford Medicine News Center</source><year>2011</year><access-date>2025-05-18</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://med.stanford.edu/news/all-news/2011/11/stanford-joins-braingate-team-developing-brain-computer-interface-to-aid-people-with-paralysis.html">https://med.stanford.edu/news/all-news/2011/11/stanford-joins-braingate-team-developing-brain-computer-interface-to-aid-people-with-paralysis.html</ext-link></comment></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Bouton</surname><given-names>CE</given-names> </name><name name-style="western"><surname>Shaikhouni</surname><given-names>A</given-names> </name><name name-style="western"><surname>Annetta</surname><given-names>NV</given-names> </name><etal/></person-group><article-title>Restoring cortical control of functional movement in a human with quadriplegia</article-title><source>Nature</source><year>2016</year><month>05</month><day>12</day><volume>533</volume><issue>7602</issue><fpage>247</fpage><lpage>250</lpage><pub-id pub-id-type="doi">10.1038/nature17435</pub-id><pub-id pub-id-type="medline">27074513</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Mulroy</surname><given-names>J</given-names> </name></person-group><article-title>Brain-computer interface can help you get in the game</article-title><source>PCWorld</source><year>2023</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.pcworld.com/article/499690/brain_computer_helps_get_you_in_the_game.html">https://www.pcworld.com/article/499690/brain_computer_helps_get_you_in_the_game.html</ext-link></comment></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Deuel</surname><given-names>TA</given-names> </name><name name-style="western"><surname>Pampin</surname><given-names>J</given-names> </name><name name-style="western"><surname>Sundstrom</surname><given-names>J</given-names> </name><name name-style="western"><surname>Darvas</surname><given-names>F</given-names> </name></person-group><article-title>The encephalophone: a novel musical biofeedback device using conscious control of electroencephalogram (EEG)</article-title><source>Front Hum 
Neurosci</source><year>2017</year><volume>11</volume><fpage>213</fpage><pub-id pub-id-type="doi">10.3389/fnhum.2017.00213</pub-id><pub-id pub-id-type="medline">28491030</pub-id></nlm-citation></ref><ref id="ref39"><label>39</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>A</given-names> </name></person-group><article-title>You can play this musical instrument with just your thoughts</article-title><source>The Verge</source><year>2017</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.theverge.com/2017/7/13/15965836/encephalophone-musical-instrument-thought-brain-computer-interface-neuroscience">https://www.theverge.com/2017/7/13/15965836/encephalophone-musical-instrument-thought-brain-computer-interface-neuroscience</ext-link></comment></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Clausen</surname><given-names>J</given-names> </name></person-group><article-title>Man, machine and in between</article-title><source>Nature</source><year>2009</year><month>02</month><day>26</day><volume>457</volume><issue>7233</issue><fpage>1080</fpage><lpage>1081</lpage><pub-id pub-id-type="doi">10.1038/4571080a</pub-id><pub-id pub-id-type="medline">19242454</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nijboer</surname><given-names>F</given-names> </name><name name-style="western"><surname>Clausen</surname><given-names>J</given-names> </name><name name-style="western"><surname>Allison</surname><given-names>BZ</given-names> </name><name name-style="western"><surname>Haselager</surname><given-names>P</given-names> </name></person-group><article-title>The Asilomar survey: 
stakeholders&#x2019; opinions on ethical issues related to brain-computer interfacing</article-title><source>Neuroethics</source><year>2013</year><volume>6</volume><issue>3</issue><fpage>541</fpage><lpage>578</lpage><pub-id pub-id-type="doi">10.1007/s12152-011-9132-6</pub-id><pub-id pub-id-type="medline">24273623</pub-id></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Demetriades</surname><given-names>AK</given-names> </name><name name-style="western"><surname>Demetriades</surname><given-names>CK</given-names> </name><name name-style="western"><surname>Watts</surname><given-names>C</given-names> </name><name name-style="western"><surname>Ashkan</surname><given-names>K</given-names> </name></person-group><article-title>Brain-machine interface: the challenge of neuroethics</article-title><source>Surgeon</source><year>2010</year><month>10</month><volume>8</volume><issue>5</issue><fpage>267</fpage><lpage>269</lpage><pub-id pub-id-type="doi">10.1016/j.surge.2010.05.006</pub-id><pub-id pub-id-type="medline">20709284</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Burwell</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sample</surname><given-names>M</given-names> </name><name name-style="western"><surname>Racine</surname><given-names>E</given-names> </name></person-group><article-title>Ethical aspects of brain computer interfaces: a scoping review</article-title><source>BMC Med Ethics</source><year>2017</year><month>11</month><day>9</day><volume>18</volume><issue>1</issue><fpage>60</fpage><pub-id pub-id-type="doi">10.1186/s12910-017-0220-y</pub-id><pub-id pub-id-type="medline">29121942</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wolpaw</surname><given-names>JR</given-names> </name><name name-style="western"><surname>Loeb</surname><given-names>GE</given-names> </name><name name-style="western"><surname>Allison</surname><given-names>BZ</given-names> </name><etal/></person-group><article-title>BCI meeting 2005&#x2014;workshop on signals and recording methods</article-title><source>IEEE Trans Neural Syst Rehabil Eng</source><year>2006</year><month>06</month><volume>14</volume><issue>2</issue><fpage>138</fpage><lpage>141</lpage><pub-id pub-id-type="doi">10.1109/TNSRE.2006.875583</pub-id><pub-id pub-id-type="medline">16792279</pub-id></nlm-citation></ref><ref id="ref45"><label>45</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wolbring</surname><given-names>G</given-names> </name><name name-style="western"><surname>Diep</surname><given-names>L</given-names> </name><name name-style="western"><surname>Yumakulov</surname><given-names>S</given-names> </name><name name-style="western"><surname>Ball</surname><given-names>N</given-names> </name><name name-style="western"><surname>Leopatra</surname><given-names>V</given-names> </name><name name-style="western"><surname>Yergens</surname><given-names>D</given-names> </name></person-group><article-title>Emerging therapeutic enhancement enabling health technologies and their discourses: what is discussed within the health domain?</article-title><source>Healthcare (Basel)</source><year>2013</year><month>07</month><day>25</day><volume>1</volume><issue>1</issue><fpage>20</fpage><lpage>52</lpage><pub-id pub-id-type="doi">10.3390/healthcare1010020</pub-id><pub-id pub-id-type="medline">27429129</pub-id></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Li</surname><given-names>KY</given-names> </name></person-group><article-title>Get out of my head: an examination of potential brain-computer interface data privacy concerns</article-title><source>Boston Coll Intellect Prop Technol Forum</source><year>2018</year><volume>2018</volume><fpage>1</fpage><lpage>12</lpage></nlm-citation></ref><ref id="ref47"><label>47</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Levy</surname><given-names>R</given-names> </name></person-group><article-title>Exclusive: Musk&#x2019;s Neuralink faces federal probe, employee backlash over animal tests</article-title><source>Reuters</source><year>2022</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.reuters.com/technology/musks-neuralink-faces-federal-probe-employee-backlash-over-animal-tests-2022-12-05">https://www.reuters.com/technology/musks-neuralink-faces-federal-probe-employee-backlash-over-animal-tests-2022-12-05</ext-link></comment></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Bonilla</surname><given-names>M</given-names> </name><name name-style="western"><surname>Iradukunda</surname><given-names>S</given-names> </name><name name-style="western"><surname>Thomas</surname><given-names>P</given-names> </name></person-group><article-title>Sentiment analysis of public perception towards Elon Musk on Reddit (2008-2022)</article-title><source>Cardinal Edge</source><year>2023</year><volume>1</volume><issue>3</issue><fpage>7</fpage></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ante</surname><given-names>L</given-names> </name></person-group><article-title>How Elon Musk&#x2019;s Twitter activity 
moves cryptocurrency markets</article-title><source>Technol Forecast Soc Change</source><year>2023</year><month>01</month><volume>186</volume><fpage>122112</fpage><pub-id pub-id-type="doi">10.1016/j.techfore.2022.122112</pub-id></nlm-citation></ref><ref id="ref50"><label>50</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ruan</surname><given-names>T</given-names> </name><name name-style="western"><surname>Lv</surname><given-names>Q</given-names> </name></person-group><article-title>Public perception of electric vehicles on Reddit and Twitter: a cross-platform analysis</article-title><source>Transportation Research Interdisciplinary Perspectives</source><year>2023</year><month>09</month><volume>21</volume><fpage>100872</fpage><pub-id pub-id-type="doi">10.1016/j.trip.2023.100872</pub-id></nlm-citation></ref><ref id="ref51"><label>51</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Metz</surname><given-names>C</given-names> </name><name name-style="western"><surname>Satariano</surname><given-names>A</given-names> </name><name name-style="western"><surname>Che</surname><given-names>C</given-names> </name></person-group><article-title>How Elon Musk became a geopolitical chaos agent</article-title><source>The New York Times</source><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.nytimes.com/2022/10/26/technology/elon-musk-geopolitics-china-ukraine.html">https://www.nytimes.com/2022/10/26/technology/elon-musk-geopolitics-china-ukraine.html</ext-link></comment></nlm-citation></ref><ref id="ref52"><label>52</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Zehn</surname><given-names>MJ</given-names> </name></person-group><article-title>Elon Musk: a new antagonist in Brazilian 
politics?</article-title><source>Brazilian Research and Studies Center</source><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://bras-center.com/elon-musk-a-new-antagonist-in-brazilian-politics/">https://bras-center.com/elon-musk-a-new-antagonist-in-brazilian-politics/</ext-link></comment></nlm-citation></ref><ref id="ref53"><label>53</label><nlm-citation citation-type="web"><article-title>Brain computer interface market size, share &#x0026; trends analysis report by product (invasive, non-invasive), by application (healthcare, communication &#x0026; control), by end use, by region, and segment forecasts, 2024 - 2030</article-title><source>Grand View Research</source><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.grandviewresearch.com/industry-analysis/brain-computer-interfaces-market">https://www.grandviewresearch.com/industry-analysis/brain-computer-interfaces-market</ext-link></comment></nlm-citation></ref><ref id="ref54"><label>54</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Yin</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Wan</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Fang</surname><given-names>H</given-names> </name><etal/></person-group><article-title>Bibliometric analysis on brain-computer interfaces in a 30-year period</article-title><source>Appl Intell</source><year>2023</year><month>06</month><volume>53</volume><issue>12</issue><fpage>16205</fpage><lpage>16225</lpage><pub-id pub-id-type="doi">10.1007/s10489-022-04226-4</pub-id></nlm-citation></ref><ref id="ref55"><label>55</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Osiceanu</surname><given-names>ME</given-names> </name></person-group><article-title>Psychological implications of 
modern technologies: &#x201C;technofobia&#x201D; versus &#x201C;technophilia&#x201D;</article-title><source>Procedia - Social and Behavioral Sciences</source><year>2015</year><month>05</month><volume>180</volume><fpage>1137</fpage><lpage>1144</lpage><pub-id pub-id-type="doi">10.1016/j.sbspro.2015.02.229</pub-id></nlm-citation></ref><ref id="ref56"><label>56</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>de Castro-Cros</surname><given-names>M</given-names> </name><name name-style="western"><surname>Sebastian-Romagosa</surname><given-names>M</given-names> </name><name name-style="western"><surname>Rodr&#x00ED;guez-Serrano</surname><given-names>J</given-names> </name><etal/></person-group><article-title>Effects of gamification in BCI functional rehabilitation</article-title><source>Front Neurosci</source><year>2020</year><volume>14</volume><fpage>882</fpage><pub-id pub-id-type="doi">10.3389/fnins.2020.00882</pub-id><pub-id pub-id-type="medline">32973435</pub-id></nlm-citation></ref><ref id="ref57"><label>57</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Gao</surname><given-names>C</given-names> </name><name name-style="western"><surname>Xia</surname><given-names>M</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Han</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Gu</surname><given-names>Y</given-names> </name></person-group><article-title>Improving the Brain-Computer Interface Learning Process with Gamification in Motor Imagery: A Review</article-title><source>IntechOpen eBooks</source><year>2022</year><volume>21</volume><pub-id pub-id-type="doi">10.5772/intechopen.105715</pub-id></nlm-citation></ref><ref id="ref58"><label>58</label><nlm-citation citation-type="web"><article-title>Universal 
brain-computer interface lets people play games with just their thoughts</article-title><source>Cockrell School of Engineering</source><year>2024</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://cockrell.utexas.edu/news/archive/9841-universal-brain-computer-interface-lets-people-play-games-with-just-their-thoughts">https://cockrell.utexas.edu/news/archive/9841-universal-brain-computer-interface-lets-people-play-games-with-just-their-thoughts</ext-link></comment></nlm-citation></ref><ref id="ref59"><label>59</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kumar</surname><given-names>S</given-names> </name><name name-style="western"><surname>Alawieh</surname><given-names>H</given-names> </name><name name-style="western"><surname>Racz</surname><given-names>FS</given-names> </name><name name-style="western"><surname>Fakhreddine</surname><given-names>R</given-names> </name><name name-style="western"><surname>Mill&#x00E1;n</surname><given-names>J del R</given-names> </name></person-group><article-title>Transfer learning promotes acquisition of individual BCI skills</article-title><source>PNAS Nexus</source><year>2024</year><month>02</month><day>1</day><volume>3</volume><issue>2</issue><fpage>pgae076</fpage><pub-id pub-id-type="doi">10.1093/pnasnexus/pgae076</pub-id></nlm-citation></ref><ref id="ref60"><label>60</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hubner</surname><given-names>D</given-names> </name><name name-style="western"><surname>Schall</surname><given-names>A</given-names> </name><name name-style="western"><surname>Tangermann</surname><given-names>M</given-names> </name></person-group><article-title>Two player online brain-controlled chess</article-title><source>Annu Int Conf IEEE Eng Med Biol 
Soc</source><year>2019</year><month>07</month><volume>2019</volume><fpage>3018</fpage><lpage>3021</lpage><pub-id pub-id-type="doi">10.1109/EMBC.2019.8856965</pub-id><pub-id pub-id-type="medline">31946524</pub-id></nlm-citation></ref><ref id="ref61"><label>61</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Porter</surname><given-names>J</given-names> </name></person-group><article-title>Gabe Newell has big plans for brain-computer interfaces in gaming</article-title><source>The Verge</source><year>2021</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.theverge.com/2021/1/25/22248202/gabe-newell-valve-brain-computer-interface-bci-meat-peripherals">https://www.theverge.com/2021/1/25/22248202/gabe-newell-valve-brain-computer-interface-bci-meat-peripherals</ext-link></comment></nlm-citation></ref><ref id="ref62"><label>62</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hamad</surname><given-names>A</given-names> </name><name name-style="western"><surname>Jia</surname><given-names>B</given-names> </name></person-group><article-title>How virtual reality technology has changed our lives: an overview of the current and potential applications and limitations</article-title><source>Int J Environ Res Public Health</source><year>2022</year><month>09</month><day>8</day><volume>19</volume><issue>18</issue><fpage>11278</fpage><pub-id pub-id-type="doi">10.3390/ijerph191811278</pub-id><pub-id pub-id-type="medline">36141551</pub-id></nlm-citation></ref><ref id="ref63"><label>63</label><nlm-citation citation-type="web"><article-title>Musk&#x2019;s Neuralink shows first brain-chip patient playing online chess</article-title><source>Reuters</source><year>2024</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" 
xlink:href="https://www.reuters.com/business/healthcare-pharmaceuticals/neuralink-shows-first-brain-chip-patient-playing-online-chess-2024-03-21">https://www.reuters.com/business/healthcare-pharmaceuticals/neuralink-shows-first-brain-chip-patient-playing-online-chess-2024-03-21</ext-link></comment></nlm-citation></ref><ref id="ref64"><label>64</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Tangermann</surname><given-names>V</given-names> </name></person-group><article-title>First Neuralink patient says implant has given him incredible gaming skills</article-title><source>Futurism</source><year>2024</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://futurism.com/neoscope/first-neuralink-patient-gaming-skills">https://futurism.com/neoscope/first-neuralink-patient-gaming-skills</ext-link></comment></nlm-citation></ref><ref id="ref65"><label>65</label><nlm-citation citation-type="web"><person-group person-group-type="author"><collab>PowerfulJRE</collab></person-group><article-title>Joe Rogan experience #2167&#x2014;Noland Arbaugh</article-title><source>YouTube</source><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.youtube.com/watch?v=Bfo2xIeaOAE">https://www.youtube.com/watch?v=Bfo2xIeaOAE</ext-link></comment></nlm-citation></ref><ref id="ref66"><label>66</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Bardhan</surname><given-names>A</given-names> </name></person-group><article-title>Twitch streamer plays Elden Ring using only her brain</article-title><source>Kotaku</source><year>2023</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" 
xlink:href="https://kotaku.com/twitch-streamer-elden-ring-play-brain-eeg-perrikaryal-1850024234">https://kotaku.com/twitch-streamer-elden-ring-play-brain-eeg-perrikaryal-1850024234</ext-link></comment></nlm-citation></ref><ref id="ref67"><label>67</label><nlm-citation citation-type="web"><article-title>Video game market size, share &#x0026; trends analysis report by device (console, mobile, computer), by type (online, offline), by region (Asia Pacific, North America, Europe), and segment forecasts, 2023 - 2030</article-title><source>Grand View Research</source><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.grandviewresearch.com/industry-analysis/video-game-market">https://www.grandviewresearch.com/industry-analysis/video-game-market</ext-link></comment></nlm-citation></ref><ref id="ref68"><label>68</label><nlm-citation citation-type="web"><article-title>The future of philanthropy</article-title><source>Fidelity Charitable</source><year>2021</year><access-date>2025-04-19</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.fidelitycharitable.org/content/dam/fc-public/docs/resources/2021-future-of-philanthropy-summary.pdf">https://www.fidelitycharitable.org/content/dam/fc-public/docs/resources/2021-future-of-philanthropy-summary.pdf</ext-link></comment></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Additional diagrams, tables, and figures.</p><media xlink:href="formative_v9i1e60859_app1.docx" xlink:title="DOCX File, 1106 KB"/></supplementary-material></app-group></back></article>