<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="review-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Med.</journal-id>
<journal-title>Frontiers in Medicine</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Med.</abbrev-journal-title>
<issn pub-type="epub">2296-858X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fmed.2021.733241</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Medicine</subject>
<subj-group>
<subject>Review</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Augmented Reality in Ophthalmology: Applications and Challenges</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Li</surname> <given-names>Tongkeng</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn002"><sup>&#x02020;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1298350/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Li</surname> <given-names>Chenghao</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="author-notes" rid="fn002"><sup>&#x02020;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1444061/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhang</surname> <given-names>Xiayin</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="author-notes" rid="fn002"><sup>&#x02020;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1042926/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Liang</surname> <given-names>Wenting</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Chen</surname> <given-names>Yongxin</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Ye</surname> <given-names>Yunpeng</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Lin</surname> <given-names>Haotian</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1045270/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>State Key Laboratory of Ophthalmology, Zhongshan Ophthalmic Center, Sun Yat-sen University</institution>, <addr-line>Guangzhou</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>Zhongshan School of Medicine, Sun Yat-sen University</institution>, <addr-line>Guangzhou</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>Guangdong Eye Institute, Department of Ophthalmology, Guangdong Provincial People&#x00027;s Hospital, Guangdong Academy of Medical Sciences</institution>, <addr-line>Guangzhou</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>School of Biomedical Engineering, Sun Yat-sen University</institution>, <addr-line>Guangzhou</addr-line>, <country>China</country></aff>
<aff id="aff5"><sup>5</sup><institution>Center for Precision Medicine, Sun Yat-sen University</institution>, <addr-line>Guangzhou</addr-line>, <country>China</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Eray Atalay, Eski&#x0015F;ehir Osmangazi University, Turkey</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Miaomiao Xu, Facebook Reality Labs Research, United States; Kiyoshi Kiyokawa, Nara Institute of Science and Technology (NAIST), Japan</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Haotian Lin <email>linht5&#x00040;mail.sysu.edu.cn</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Ophthalmology, a section of the journal Frontiers in Medicine</p></fn>
<fn fn-type="equal" id="fn002"><p>&#x02020;These authors have contributed equally to this work and share first authorship</p></fn></author-notes>
<pub-date pub-type="epub">
<day>10</day>
<month>12</month>
<year>2021</year>
</pub-date>
<pub-date pub-type="collection">
<year>2021</year>
</pub-date>
<volume>8</volume>
<elocation-id>733241</elocation-id>
<history>
<date date-type="received">
<day>30</day>
<month>06</month>
<year>2021</year>
</date>
<date date-type="accepted">
<day>19</day>
<month>11</month>
<year>2021</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2021 Li, Li, Zhang, Liang, Chen, Ye and Lin.</copyright-statement>
<copyright-year>2021</copyright-year>
<copyright-holder>Li, Li, Zhang, Liang, Chen, Ye and Lin</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license> </permissions>
<abstract><p>Augmented reality (AR) has been developed rapidly and implemented in many fields such as medicine, maintenance, and cultural heritage. Unlike other specialties, ophthalmology connects closely with AR since most AR systems are based on vision systems. Here we summarize the applications and challenges of AR in ophthalmology and provide insights for further research. Firstly, we illustrate the structure of the standard AR system and present essential hardware. Secondly, we systematically introduce applications of AR in ophthalmology, including therapy, education, and clinical assistance. To conclude, there is still a large room for development, which needs researchers to pay more effort. Applications in diagnosis and protection might be worth exploring. Although the obstacles of hardware restrict the development of AR in ophthalmology at present, the AR will realize its potential and play an important role in ophthalmology in the future with the rapidly developing technology and more in-depth research.</p></abstract>
<kwd-group>
<kwd>augmented reality</kwd>
<kwd>ophthalmology</kwd>
<kwd>therapy</kwd>
<kwd>education</kwd>
<kwd>clinical assistance</kwd>
</kwd-group>
<counts>
<fig-count count="5"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="78"/>
<page-count count="12"/>
<word-count count="8279"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>Introduction</title>
<p>Augmented reality (AR) is a technology that enhances the natural environment with computer-generated information in real-time (<xref ref-type="bibr" rid="B1">1</xref>). AR is not restricted to the visual sense. It can be implemented for all senses, including sight, hearing, smell, and touch (<xref ref-type="bibr" rid="B2">2</xref>). Besides adding virtual information to the natural environment, AR applications include removing or processing real objects from the real environment, more commonly called mediated reality or diminished reality (<xref ref-type="bibr" rid="B1">1</xref>, <xref ref-type="bibr" rid="B2">2</xref>). Unlike virtual reality (VR), which completely immerses users in a computer-generated virtual world, AR is based on the natural world and enhances the real environment with computer-generated information (<xref ref-type="bibr" rid="B1">1</xref>).</p>
<p>As a developing technology, AR has drawn the interest of researchers from different fields. Besides, AR has attracted companies like Google and Microsoft, which created AR devices such as Google Glass and HoloLens, providing hardware foundations for subsequent research. With the help of researchers and companies, AR has been developed rapidly and implemented in many fields such as medicine, maintenance, and cultural heritage (<xref ref-type="bibr" rid="B3">3</xref>).</p>
<p>Healthcare has become one of the pioneers, especially for applications requiring guidance and assistance (<xref ref-type="bibr" rid="B4">4</xref>). For example, AR has been certified effective in medical education and training, surgery navigation, and gastrointestinal endoscopy (<xref ref-type="bibr" rid="B5">5</xref>&#x02013;<xref ref-type="bibr" rid="B7">7</xref>). However, unlike other specialties, ophthalmology connects closely with AR since most AR systems are based on vision generated by the eyes. Therefore, AR has particular applications in ophthalmology. Especially in the therapy of ocular diseases, AR possesses enormous potential to provide alternative or adjuvant choices in non-invasive and convenient ways to benefit patients who could not receive traditional medicine and surgical treatment. In addition to therapy, AR also has been implemented in education and clinical assistance in ophthalmology.</p>
<p>In this article, we reviewed AR in ophthalmology, summarized the applications and challenges of AR, and provided some suggestions for further research. We illustrated the structure of an AR system in ophthalmology, presented essential hardware, and systematically introduced AR&#x00027;s applications in ophthalmology, including applications in therapy, education, and clinical assistance. The ocular diseases to which AR has been applied in therapy include visual field defects, color vision deficiency, low vision, blindness, nyctalopia, metamorphopsia, and amblyopia. Applications in education include medical education and public education. Applications in clinical assistance involve combining optical coherence tomography (OCT) and AR in surgery, deep anterior lamellar keratoplasty surgery navigation, and slit-lamp examination assistance.</p></sec>
<sec id="s2">
<title>Structure of an AR System in Ophthalmology</title>
<sec>
<title>Overview</title>
<p>As shown in <xref ref-type="fig" rid="F1">Figure 1</xref>, an AR system includes three primary modules: video capturing, processing, and displaying. The camera captures the natural environment and then transmits it to the computing unit for processing. Finally, the processed information is reflected on the display.</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>Structure of an augmented reality system in ophthalmology. The boxes filled in blue indicate the system&#x00027;s procedures, while the boxes in red indicate the devices connected with the procedures. The camera captures the natural environment and then transmits it to the computing unit for processing. Finally, the processed information is reflected on the display.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-08-733241-g0001.tif"/>
</fig></sec>
<sec>
<title>Camera</title>
<p>There are many kinds of cameras for video capturing, depending significantly on the type of application. For instance, if developers need the function to evaluate the depth of the space, they probably will use a depth sensor camera. The stereo camera can support the 3D reconstruction of the real world.</p></sec>
<sec>
<title>Computing Unit</title>
<p>The AR systems used to use personal computers as their computing center. With the invention of smartphones and microcomputers, the computing center became portable and subtle or directly embedded in the display.</p></sec>
<sec>
<title>Display</title>
<p>The displays applied to AR in ophthalmology include head-mounted displays (HMDs) and handheld displays. HMDs are displays worn on the head to place images over the user&#x00027;s view. The display technologies can be video see-through (VST) or optical see-through (OST) (<xref ref-type="bibr" rid="B8">8</xref>). VST display uses video, which integrates virtual and natural environments to cover a complete view of the user, and the user cannot see the natural environment directly. On the contrary, the OST display only overlays the virtual images on the field of vision, and the user can see the natural environment as usual. The handheld display, like a smartphone, is a VST display with a small computing center held in the user&#x00027;s hands.</p></sec>
<sec>
<title>Major AR Prototypes</title>
<p>There are two major AR prototypes: HMD-based AR systems and smartphone-based AR systems (<xref ref-type="bibr" rid="B9">9</xref>). HMD-based AR systems include AR systems produced by commercial companies like Google and Microsoft or homemade AR systems. As for some mainstream AR systems on the market, Google Glass, Microsoft HoloLens, and Epson Moverio BT series use OST display while the HTC Vive and Oculus use VST display. All of them have systems and give the users a mature experience. With the progress and maturity of technology, smartphones gradually grew into multipurpose tools, even replacing personal computers. With the miniature computing units and a high-resolution camera, smartphones can also be a platform to carry and achieve some AR applications. Compared with HMD-based AR systems, smartphone-based AR systems are more portable and cheaper, which makes smartphone-based AR systems easier to promote widely (<xref ref-type="bibr" rid="B8">8</xref>). However, because smartphones are handheld displays, they are not expected to be used for a long time. Using an HMD is not only more comfortable than using a smartphone but also provides a better visual experience.</p></sec></sec>
<sec id="s3">
<title>Applications of AR in Ophthalmology</title>
<sec>
<title>Applications in Therapy</title>
<p>To meet the authoritative standard, we classified applications in the therapy of ocular diseases according to the International Classification of Diseases, 10th Revision, published by the World Health Organization<xref ref-type="fn" rid="fn0001"><sup>1</sup></xref>. The ocular diseases to which AR has been applied include visual field defects, color vision deficiency, low vision, blindness, nyctalopia, metamorphopsia, and amblyopia.</p>
<sec>
<title>Visual Field Defects</title>
<p>Several ophthalmic diseases could cause visual field defects (VFD), including glaucoma, stroke, and retinitis pigmentosa (<xref ref-type="bibr" rid="B10">10</xref>). Visual field defects would bring difficulties in patients&#x00027; daily life such as driving, crossing the road, reading, and visual searching (<xref ref-type="bibr" rid="B11">11</xref>&#x02013;<xref ref-type="bibr" rid="B13">13</xref>). Due to the restricted vision field, the patients with VFD are less sensitive to surrounding dangers, threatening their health severely. In addition, some VFD are caused by brain injury, which cannot be reversed by traditional medicine or surgical treatment (<xref ref-type="bibr" rid="B14">14</xref>). Fortunately, the appearance of AR gives a considerable solution that provides a visual aid and improves searching capability.</p>
<p>Image remapping, overlaid windows, visual multiplexing, and danger indicators have been developed to provide a visual aid (<xref ref-type="fig" rid="F2">Figure 2</xref>). Sayed et al. proposed customized digital spectacles with the image remapping method (<xref ref-type="bibr" rid="B10">10</xref>). The users were asked to measure their visual field at first. Then the images captured by the camera were remapped using resizing and shifting algorithms to adapt to the measured visual field (<xref ref-type="fig" rid="F2">Figure 2B</xref>). A series of prospective case studies have been conducted to verify the functional field&#x00027;s efficiency (<xref ref-type="bibr" rid="B10">10</xref>, <xref ref-type="bibr" rid="B18">18</xref>). However, digital spectacles cannot preserve the user&#x00027;s original vision. Another method implemented an overlaid window to display the overview scene captured by the camera (<xref ref-type="fig" rid="F2">Figure 2C</xref>). The window is overlaid on the actual visual field of the user (<xref ref-type="bibr" rid="B15">15</xref>). Unfortunately, this method had an inherent contradiction between the augmented contextual information and local unscreened information since the overlaid window would inevitably block the natural view. Peli proposed visual multiplexing, defined as two or more signals transmitted on the same channel. In this way, the complete information can be used at the receiving end (<xref ref-type="bibr" rid="B19">19</xref>, <xref ref-type="bibr" rid="B20">20</xref>). Spatial multiplexing, one of the visual multiplexing methods, has been implemented to aid tunnel vision (<xref ref-type="bibr" rid="B16">16</xref>). The minified edge images of the environmental scene were overlaid onto the user&#x00027;s natural vision (<xref ref-type="fig" rid="F2">Figure 2D</xref>). The edge pixels did not block the realistic view since they only occupied a tiny part of a field of view. 
Another AR system was developed to alert patients with tunnel vision to surrounding dangers (<xref ref-type="bibr" rid="B17">17</xref>). The system would track the moving objects in real-time and extract their characteristics to determine their degree of danger. The circles are superimposed on the edge of the visual field; their color is determined by the degree of danger, and their position corresponds to the positions of the moving objects (<xref ref-type="fig" rid="F2">Figure 2E</xref>). Similarly, Ichinose et al. used edge indicators instead of circles to supplement the information in the lost vision of the patients (<xref ref-type="bibr" rid="B21">21</xref>).</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>Different methods to aid patients with visual field defects. <bold>(A)</bold> Healthy vision. <bold>(B)</bold> The remapping method (<xref ref-type="bibr" rid="B10">10</xref>). Patients with visual field defects cannot see the entire scene. After remapping, patients can see the entire scene in their residual visual field. <bold>(C)</bold> The method of overlaying the overview window (<xref ref-type="bibr" rid="B15">15</xref>). After overlaying an overview window, patients can perceive the entire scene and natural vision simultaneously. <bold>(D)</bold> The method of using visual multiplexing (<xref ref-type="bibr" rid="B16">16</xref>). Patients can perceive the counter view of the entire scene and natural vision simultaneously. <bold>(E)</bold> The method of implementing danger indicators (<xref ref-type="bibr" rid="B17">17</xref>). Patients with tunnel vision cannot notice the danger the Unidentified Flying Object brought. The danger indicators can help patients notice surrounding dangers.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-08-733241-g0002.tif"/>
</fig>
<p>Besides providing a visual aid for the VFD patient, improving searching capability is another direction. In Zhao&#x00027;s study, CueSee offered five visual cues for users who cannot finish visual search tasks (<xref ref-type="bibr" rid="B13">13</xref>). Among them, guidelines and sun rays are the cues designed for users with peripheral vision loss. The former connected the center of the display and the object with a red guideline, and the latter converged on the center of the target with eight red guidelines. The location of the object could be indicated in these ways.</p></sec>
<sec>
<title>Color Vision Deficiency</title>
<p>Color vision deficiency (CVD), also known as color blindness, is a group of ophthalmic diseases that affect 8% of males globally. Patients have difficulties perceiving and distinguishing specific colors (<xref ref-type="bibr" rid="B22">22</xref>, <xref ref-type="bibr" rid="B23">23</xref>). CVD brings obstacles to patients&#x00027; daily life and restricts their occupations (<xref ref-type="bibr" rid="B24">24</xref>). Although CVD cannot be cured by medical treatment right now, AR systems could help users improve their ability to distinguish colors and even perform almost as well as healthy people (<xref ref-type="bibr" rid="B24">24</xref>).</p>
<p>Several commercial AR devices provide prototypes for CVD aiding research (<xref ref-type="bibr" rid="B24">24</xref>). Omnicolor and Chroma are applications based on Google Glass, one of the most popular intelligent glasses (<xref ref-type="bibr" rid="B25">25</xref>, <xref ref-type="bibr" rid="B26">26</xref>). Popleteev&#x00027;s applications and Chroma glasses are based on Epson&#x00027;s Moverio, another AR device cheaper than Google Glass (<xref ref-type="bibr" rid="B27">27</xref>, <xref ref-type="bibr" rid="B28">28</xref>). Besides, Schmitt et al. developed applications on the smartphone to help patients with CVD (<xref ref-type="bibr" rid="B29">29</xref>). Other researchers assemble homemade AR systems or modify existing devices (<xref ref-type="bibr" rid="B25">25</xref>, <xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B30">30</xref>).</p>
<p>The processing technologies can be classified into two categories: substituting colors and augmenting visual information (<xref ref-type="fig" rid="F3">Figure 3</xref>). Substituting colors is a group of strategies to transfer the target color to another one, including daltonization, highlight, and contrast. The most representative and popular algorithm is daltonization (<xref ref-type="fig" rid="F3">Figure 3B</xref>), which attempts to shift colors to achieve less confusing color combinations for patients with CVD (<xref ref-type="bibr" rid="B25">25</xref>&#x02013;<xref ref-type="bibr" rid="B27">27</xref>, <xref ref-type="bibr" rid="B31">31</xref>, <xref ref-type="bibr" rid="B32">32</xref>). Besides, the highlight method refers to highlighting the target colors by replacing them with colors that are easier to see [(<xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B28">28</xref>, <xref ref-type="bibr" rid="B29">29</xref>); <xref ref-type="fig" rid="F3">Figure 3C</xref>]. In addition, color contrast is a special algorithm for distinguishing a pair of colors (<xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B28">28</xref>, <xref ref-type="bibr" rid="B29">29</xref>). The mechanism of the algorithm is to transfer the target pair of colors to another particular pair of colors that are easier to distinguish (<xref ref-type="fig" rid="F3">Figure 3D</xref>). Augmenting visual information includes outlining the shape of the areas of target colors and indicating the target color with text or icons [(<xref ref-type="bibr" rid="B26">26</xref>, <xref ref-type="bibr" rid="B28">28</xref>&#x02013;<xref ref-type="bibr" rid="B30">30</xref>); <xref ref-type="fig" rid="F3">Figures 3E,F</xref>].</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p>Different methods to help patients with color blindness. The top figure indicates healthy vision in each pair of figures, while the bottom indicates patients with protanopia. <bold>(A)</bold> The natural scene. The patients with protanopia cannot distinguish the apples in red and the leaves in green. <bold>(B)</bold> The daltonization method (<xref ref-type="bibr" rid="B27">27</xref>). All colors are shifted to achieve less confusing color combinations. <bold>(C)</bold> The highlight method (<xref ref-type="bibr" rid="B26">26</xref>). Once the patients want to distinguish any color, the target color will be replaced by other colors that are easier to distinguish. In this case, red is replaced by pink. <bold>(D)</bold> The contrast method (<xref ref-type="bibr" rid="B26">26</xref>). In this case, red is replaced by yellow, and green is replaced by blue. <bold>(E)</bold> The outline method (<xref ref-type="bibr" rid="B26">26</xref>). In this case, the areas in red are outlined in order to be distinguished easily. <bold>(F)</bold> The method of using icons (<xref ref-type="bibr" rid="B30">30</xref>). In this case, the areas in red are indicated by the arrows.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-08-733241-g0003.tif"/>
</fig></sec>
<sec>
<title>Blindness</title>
<p>Blindness is defined as a best-corrected visual acuity in the patient&#x00027;s better-seeing eye of &#x0003C;20/400 (<xref ref-type="bibr" rid="B33">33</xref>). The dominant causes of blindness are age-related diseases, such as age-related macular degeneration (<xref ref-type="bibr" rid="B34">34</xref>). Up to now, there is no therapy that can reverse blindness since the diseases disturb the transmission of visual data from the eye to the brain. Blindness brings severe obstacles to the patient&#x00027;s life, especially restricting their mobility. Perceiving the surrounding environment and avoiding obstacles are essential for improving mobility. Therefore, several distance-based vision aid AR systems have been proposed (<xref ref-type="bibr" rid="B35">35</xref>&#x02013;<xref ref-type="bibr" rid="B38">38</xref>). These AR systems make use of color perception, light perception, and hearing to convey information. For patients with blindness who still can perceive color vision, two studies use colors to indicate distance (<xref ref-type="fig" rid="F4">Figures 4A,B</xref>). Their system is applied on Microsoft HoloLens, calculating the distances between objects and users based on the video stream (<xref ref-type="bibr" rid="B35">35</xref>, <xref ref-type="bibr" rid="B37">37</xref>). Similarly, for patients who are considered blind but retain light perception, Hick et al. developed an HMD using the brightness of the light-emitting diodes to inform the patients about the distance (<xref ref-type="bibr" rid="B36">36</xref>). As the distance between the object and the user shortened, the light-emitting diodes would become brighter (<xref ref-type="fig" rid="F4">Figure 4C</xref>). For total blindness, the spatialized sound was used to express the distances in Liu&#x00027;s study (<xref ref-type="bibr" rid="B38">38</xref>). The intensity of the sound increased as the distance shortened.</p>
<fig id="F4" position="float">
<label>Figure 4</label>
<caption><p>Aiding the patients with low vision and blindness. <bold>(A&#x02013;C)</bold> Illustrate using distance-based vision aid AR system to help the blind. <bold>(A)</bold> The natural scene. <bold>(B)</bold> The method using colors to indicate distances (<xref ref-type="bibr" rid="B37">37</xref>). The area in warmer colors indicates the closer distances, while the area in cooler colors indicates the farther. <bold>(C)</bold> The method using brightness to indicate distances (<xref ref-type="bibr" rid="B36">36</xref>). Brighter indicates closer distance while darker indicates farther. <bold>(D&#x02013;I)</bold> Illustrate different methods to aid the patients with low vision (<xref ref-type="bibr" rid="B39">39</xref>). <bold>(D)</bold> The natural vision of patients with low vision. <bold>(E)</bold> The magnification method. <bold>(F)</bold> The edge enhancement method. <bold>(G)</bold> The contrast enhancement method. <bold>(H)</bold> The text extraction method. <bold>(I)</bold> The object recognition method.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-08-733241-g0004.tif"/>
</fig></sec>
<sec>
<title>Low Vision</title>
<p>Low vision is defined as a best-corrected visual acuity in the patient&#x00027;s better-seeing eye that is better than 20/400 but &#x0003C;20/60 (<xref ref-type="bibr" rid="B33">33</xref>). Patients with low vision have difficulties in recognizing things such as reading. The applications of AR in low vision aid focus on strengthening the recognizing capability through different strategies, including magnification, edge enhancement, contrast enhancement, object recognition, and text extraction. Magnification is the single most common strategy (<xref ref-type="bibr" rid="B39">39</xref>&#x02013;<xref ref-type="bibr" rid="B43">43</xref>). The images after magnification are shown in a window or cover the user&#x00027;s sight (<xref ref-type="fig" rid="F4">Figures 4D,E</xref>). However, both of them inevitably reduce the field of view. Therefore, some research adjusts the transparency of the magnified images for the patients to see the real environment and magnified images together (<xref ref-type="bibr" rid="B40">40</xref>, <xref ref-type="bibr" rid="B43">43</xref>). Besides, edge enhancement can avoid this trouble, enhancing the edge of the objects while retaining the view of the users [(<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B39">39</xref>, <xref ref-type="bibr" rid="B42">42</xref>, <xref ref-type="bibr" rid="B44">44</xref>); <xref ref-type="fig" rid="F4">Figure 4F</xref>]. Hwang and Peli used the positive and negative Laplacian filters to enhance the edges (<xref ref-type="bibr" rid="B44">44</xref>). The former one would highlight the edge with clear surroundings, while the latter one is the opposite. In their AR system, the users can choose one of three levels to enhance the edge according to their situation. 
In addition, contrast enhancement helps users recognize things by increasing the contrast of the images [(<xref ref-type="bibr" rid="B13">13</xref>, <xref ref-type="bibr" rid="B39">39</xref>, <xref ref-type="bibr" rid="B42">42</xref>); <xref ref-type="fig" rid="F4">Figure 4G</xref>]. In Zhao&#x00027;s study, the contrast enhancement methods include maintaining the hues while increasing the contrast and image binarization processing (<xref ref-type="bibr" rid="B39">39</xref>). In recent years, with the rapid development of artificial intelligence, especially convolutional neural networks, breakthroughs have been made in image recognition (<xref ref-type="bibr" rid="B45">45</xref>). Among them, object or facial recognition technology [(<xref ref-type="bibr" rid="B39">39</xref>, <xref ref-type="bibr" rid="B42">42</xref>, <xref ref-type="bibr" rid="B45">45</xref>); <xref ref-type="fig" rid="F4">Figure 4I</xref>] and text extraction [(<xref ref-type="bibr" rid="B39">39</xref>, <xref ref-type="bibr" rid="B40">40</xref>, <xref ref-type="bibr" rid="B46">46</xref>); <xref ref-type="fig" rid="F4">Figure 4H</xref>] using optical character recognition have been combined with AR as strategies to improve the recognizing capability of users with low vision. After object or facial recognition and text extraction, the AR systems return with audio feedback and text information.</p></sec>
<sec>
<title>Nyctalopia, Metamorphopsia, and Amblyopia</title>
<p>In contrast to color blindness, nyctalopia involves problems with the rods rather than the cones (<xref ref-type="bibr" rid="B47">47</xref>). Therefore, patients with nyctalopia cannot recognize things clearly in a dark environment (<xref ref-type="fig" rid="F5">Figures 5A,B</xref>). AR has offered a new way in the therapy of nyctalopia by brightening the vision in real-time. Hu et al. proposed a night vision enhancement system based on see-through glasses (<xref ref-type="bibr" rid="B48">48</xref>). The glasses first inverted the dark image, then used the de-hazing algorithms to process the inverted image. After that, the processed image would be resized and calibrated to the real environment (<xref ref-type="fig" rid="F5">Figure 5C</xref>). Another study developed Troyoculus, which used the self-illumination filter to brighten the video streaming and implemented a bright excess filter to prevent excessive brightness (<xref ref-type="bibr" rid="B50">50</xref>). Troyoculus was developed on two prototypes, HMD and smartphone.</p>
<fig id="F5" position="float">
<label>Figure 5</label>
<caption><p>Aiding patients with amblyopia and nyctalopia. <bold>(A&#x02013;C)</bold> Illustrate aiding patients with nyctalopia (<xref ref-type="bibr" rid="B48">48</xref>). <bold>(A)</bold> The healthy vision. <bold>(B)</bold> The vision of patients with nyctalopia. <bold>(C)</bold> The vision of patients with nyctalopia after using the AR systems. The part of the scene is brightened to help the patients. <bold>(D&#x02013;F)</bold> Illustrate aiding patients with amblyopia (<xref ref-type="bibr" rid="B49">49</xref>). The patients are forced to use their lazy eyes more than their healthy eyes. In this case, patients play games that ask them to crush the roaming stones. <bold>(D)</bold> The binocular vision. <bold>(E)</bold> The vision of the healthy eye, containing the game menus that do not need too much attention. <bold>(F)</bold> The vision of the lazy eye, containing roaming stones that require high concentration.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmed-08-733241-g0005.tif"/>
</fig>
<p>Metamorphopsia is a group of macular disorders that cause the patient&#x00027;s sight to twist (<xref ref-type="bibr" rid="B51">51</xref>). The common causes of metamorphopsia include age-related macular degeneration, diabetic macular edema, and vitreoretinal interface disorders. Bozzelli developed an AR system that compensated or reduced visual geometric distortions caused by metamorphopsia in real-time, according to the precise measurement (<xref ref-type="bibr" rid="B52">52</xref>). The system tested and adjusted its algorithms constantly until the user&#x00027;s vision was corrected. The correction algorithm mapped the video streams onto a polygonal mesh and offset its vertices.</p>
<p>Amblyopia is a disorder that affects monocular or binocular spatial vision (<xref ref-type="bibr" rid="B53">53</xref>). It is a developmental disease caused by strabismus or anisometropia. Several VR and AR systems have been developed to correct amblyopia by providing games that force users to use their lazy eyes more than their healthy eyes (<xref ref-type="fig" rid="F5">Figures 5D&#x02013;F</xref>). While using the VR systems, users are completely immersed in the virtual environment, which increases the occurrence of accidents. On the contrary, users can see the real environment while using the AR system. Therefore, AR is considered a better substitute for VR in amblyopia therapy (<xref ref-type="bibr" rid="B49">49</xref>).</p></sec></sec>
<sec>
<title>Applications in Education</title>
<sec>
<title>Medical Education</title>
<p>Ophthalmic surgery and diagnosis are difficult to get started due to their meticulousness. As a result, medical students need to practice a lot to be qualified. However, many students cannot get enough chances to practice because of the lack of cases. The surgical or diagnostic training simulators based on VR or AR systems have been presented to solve this problem. Students can acquire adequate practice through simulators to improve surgical or diagnostic quality. An inevitable defect of the VR simulators is that VR blocks the connection with the real environment. On the contrary, the AR simulators can reserve the real environment while simulating the surgical or diagnostic information. The AR simulators have been implemented in direct or indirect ophthalmoscopy and microsurgery. Schuppe et al. presented EYESI, a training simulator for indirect ophthalmoscopy, in 2009 for the first time (<xref ref-type="bibr" rid="B54">54</xref>). EYESI could show the hands of the examiner from the real world while simulating the patient and fundus. Besides, the AR training simulator of indirect ophthalmoscopy has been developed on low-cost mobile platforms (<xref ref-type="bibr" rid="B55">55</xref>). The simulator has two prototypes. One is based on the smartphone used as a direct ophthalmoscope, and the other is based on HMD. Although it was helpful for users to learn ophthalmoscopy, most participants thought they could not master this technology. Moreover, Ropelato et al. developed an AR system for training micromanipulation skills, equipped with a simulative training environment and assessing system (<xref ref-type="bibr" rid="B56">56</xref>). In this system, Microsoft HoloLens presented a surgical environment that allowed users to stay in touch with the real world, such as the real instruments or the assistants involved in the surgical procedure. Similarly, CatAR was developed for cataract surgical training (<xref ref-type="bibr" rid="B57">57</xref>). 
What had improved was that they updated the display&#x00027;s resolution, which improved the reality of the simulated surgery.</p></sec>
<sec>
<title>Public Education</title>
<p>Since ophthalmic diseases might cause severe consequences and significantly impact life quality, prevention is more important than treatment (<xref ref-type="bibr" rid="B58">58</xref>). Besides, although the public might know a little about ophthalmology, a healthy person might not understand how badly the patients suffer from ocular diseases (<xref ref-type="bibr" rid="B59">59</xref>). As a result, it is necessary to educate and inform the public. The following research has developed several devices to simulate ophthalmic diseases. Ates et al. presented a low-cost simulation named SIMVIZ (<xref ref-type="bibr" rid="B60">60</xref>). Two wide-angle cameras capture the real world, and then the filters deal with the video stream in different ways according to different disease modes and finally send it to the user&#x00027;s sight. The filters can simulate macular degeneration, diabetic retinopathy, glaucoma, cataracts, color blindness, and diplopia using different algorithms. Although SIMVIZ provides an immersive simulative environment for users, it has problems of inconvenience, low resolution, and low accuracy. Similarly, Jones et al. proposed methods to simulate six visual impairments, including disability glare, blur, metamorphopsia, perceptual filling-in, and color vision deficits (<xref ref-type="bibr" rid="B61">61</xref>). These methods are implemented in smartphones and HMD. In subsequent research, Jones proved the effectiveness of these methods (<xref ref-type="bibr" rid="B62">62</xref>). Moreover, an eye-tracked AR system was developed to improve the accuracy of simulated cataracts (<xref ref-type="bibr" rid="B63">63</xref>). The processing system included the following parts: reducing visual acuity, reducing contrast, applying color shift, simulating dark shadows, and simulating sensitivity to light. In order to find the best parameters, they conducted interactive experiments with cataract patients. 
The cataract patients had undergone surgery on one eye while the other had not. They were asked to compare the simulation to their cataract view and return the result to adjust the parameters. In the end, several parameters were constantly adjusted to achieve the best simulation. Furthermore, simulating the vision of patients with ocular diseases can enlarge the sample size for research (<xref ref-type="bibr" rid="B64">64</xref>).</p></sec></sec>
<sec>
<title>Applications in Clinical Assistance</title>
<p>Surgery and diagnosis are important but challenging in ophthalmology since they require a great deal of experience (<xref ref-type="bibr" rid="B65">65</xref>). Besides helping medical students practice in the classroom, AR has been implemented in real-time clinical assistance. The AR system uses additional information, such as imaging information or diagnostic standards, to improve the quality of surgery or diagnosis. OCT is a technique that can obtain high tissue resolution images, and the optical microscope and OCT can share the same optical path (<xref ref-type="bibr" rid="B66">66</xref>). As a result, it has great potential to be introduced to ophthalmic surgery with AR systems. Combining AR and OCT in ophthalmic surgery has been explored to improve the accuracy of the surgery (<xref ref-type="bibr" rid="B67">67</xref>, <xref ref-type="bibr" rid="B68">68</xref>). An advantage of OCT is that it can perform three-dimensional reconstruction of scanned images and provide any section image or depth stereo image at surgeons&#x00027; will. Through the AR system, the OCT images can be integrated with the surgery scene in real-time, and the surgeon can investigate the OCT images and surgery scene simultaneously. In Roodaki et al.&#x00027;s study, the vertical section was provided to inform the surgeon of the distance between surgical instruments and fundamental ocular tissues (<xref ref-type="bibr" rid="B67">67</xref>). In another study, different depths of the stereo image were used to provide vivid information on tissues (<xref ref-type="bibr" rid="B68">68</xref>). Besides, the AR system has been implemented in deep anterior lamellar keratoplasty surgery navigation. The system carrying artificial intelligence can detect the corneal contour and overlay it onto the video streaming, assisting the surgeons in recognizing the cornea. Furthermore, AR has been introduced in slit-lamp for assisting diagnosis (<xref ref-type="bibr" rid="B69">69</xref>). 
The images stored previously were placed onto the real-time slit-lamp right-view while the left-view of the slit-lamp remains natural. The users can improve their diagnostic accuracy by comparing the natural view with the standard.</p></sec></sec>
<sec sec-type="discussion" id="s4">
<title>Discussion</title>
<p>AR is a popular technology in ophthalmology and has developed rapidly in the recent decade since the appearance of commercial HMDs and smartphones. The applications of AR in ophthalmology introduced in this review involve therapy, education, and clinical assistance. In order to have a better understanding of AR&#x00027;s development in ophthalmology, we have counted the number of publications in each application according to our classification method in the Google Scholar database (<xref ref-type="supplementary-material" rid="SM1">Supplementary Figure 1</xref> and <xref ref-type="supplementary-material" rid="SM1">Supplementary Table 1</xref>). The AR&#x00027;s applications in ophthalmic therapy account for the largest share, followed by education and clinical assistance. Although the number of studies on therapy is the largest, the research of AR in ophthalmic therapy is unbalanced. There are 14 publications about low vision, but only 1&#x02013;2 studies on nyctalopia, metamorphopsia, and amblyopia. It requires researchers to pay more attention to AR&#x00027;s applications in ocular diseases lacking research. In education, although the number of publications in medical education is limited, the AR systems are well-evaluated. For example, a systematic review reported that 38 publications had conducted evaluation experiments on EYESI, a surgical simulator (<xref ref-type="bibr" rid="B70">70</xref>). As a result, the development of AR in medical education is more mature than in other fields in ophthalmology. In addition, few researchers study clinical assistance, especially diagnostic assistance. Although it is of great significance to promote the progress of the overall medical level in ophthalmology, a lot of research is needed.</p>
<p>Applications in therapy aim to improve patients&#x00027; vision or activities closer to healthy people. The common symptoms of eye disease are visual disturbances, and AR is equipped with the ability to process visual information to enhance vision. Therefore, the therapy is a unique application in ophthalmology compared to other diseases. Besides, as a non-invasive treatment method, AR has great potential in alternative therapy. For patients with ocular diseases who cannot tolerate surgery or drugs, AR is a better choice. In addition, AR provides visual aids to patients suffering from incurable diseases to improve their quality of life. However, further efforts are still needed in ophthalmic treatment. Most AR applications in ophthalmology still lack adequate clinical research to evaluate the effectiveness, especially in ophthalmic disease aid. Although a few studies have conducted evaluations, the problems of insufficient sample size and selection bias still exist (<xref ref-type="bibr" rid="B42">42</xref>, <xref ref-type="bibr" rid="B62">62</xref>). It is urgent to conduct clinical experiments evaluating AR applications in ophthalmology to provide robust evidence and accelerate their widespread adoption. Besides, a highly customized AR system implemented in ophthalmic disease aid is expected to develop. At present, AR systems implemented in ophthalmic disease aid mainly focus on a specific disease. However, a patient may suffer from multiple diseases at the same time. For example, a patient suffering from myopia and color blindness needs to wear prescription glasses while using the AR system (<xref ref-type="bibr" rid="B25">25</xref>). As a result, the combination of different AR applications in ophthalmic diseases should be considered. 
In addition, the current applications of AR in fundus diseases such as age-related macular degeneration cannot solve the problem from the root cause because the visual information provided by AR is disturbed or hindered in the transmission of the retina or visual pathway. If it is possible to transmit visual information bypassing the retina or even bypassing the entire visual pathway to reach the cerebral cortex, patients might recover to healthy levels. However, it requires landmark breakthroughs in Neurobionics and the brain-computer interface (<xref ref-type="bibr" rid="B71">71</xref>). For now, AR can be considered for exploring the therapy of ocular diseases by changing visual habits. The implementation of AR in amblyopia therapy provides a good example (<xref ref-type="bibr" rid="B49">49</xref>). Similarly, AR has excellent potential for intervention to form healthy vision in ocular diseases related to visual development and formation.</p>
<p>The applications of AR in ophthalmic education involve public education and medical education in this review. Their common mechanism is simulating the vision vividly in real-time. The applications of AR in education can provide medical students with a lot of opportunities for practice. AR also allows healthy people to experience the vision of patients. In addition, compared to VR, the vision of AR simulation is more realistic and vivid because it is based on the natural environment. However, the evaluation of AR simulators is still lacking (<xref ref-type="bibr" rid="B70">70</xref>). In public education, obstacles still exist in evaluating several disease simulations, such as simulating the vision of color blindness. Since the vision of color-blind patients cannot be obtained, precise evaluation standards have been lacking (<xref ref-type="bibr" rid="B72">72</xref>).</p>
<p>Similarly, the applications of AR in clinical assistance lack effective evaluation. At present, AR is providing additional information to guide clinical activities in order to improve accuracy. However, AR technology includes not only adding information but also reducing and processing information. Therefore, AR technology can also be considered to remove things overlaying surgery or inspection targets in vision.</p>
<p>In addition to the three areas summarized in this article, AR also has potential in diagnosis and protection. Some applications for the diagnosis of strabismus have been well-developed in VR but lack exploration in AR (<xref ref-type="bibr" rid="B73">73</xref>). It might be possible to achieve better effectiveness by implementing well-developed diagnosis applications from VR to AR since AR is based on natural vision and can be used for a long time (<xref ref-type="bibr" rid="B9">9</xref>). The development of a monitoring system based on AR devices can monitor eye health in real-time and discover hidden diseases. In addition to treating patients with eye diseases, the AR system can be considered to protect healthy people and patients undergoing surgery or treatment. For healthy people, the AR system can be used to process vision in situations that are harmful to the eyes. For instance, when driving under strong sunlight, the brightness of the corresponding field of view is expected to be reduced with an AR system. The AR system can be implemented to prevent possible secondary ocular diseases for patients undergoing surgery or treatment. For example, patients who use atropine to dilate their pupils can use AR systems to avoid excessively bright vision.</p>
<p>Since AR applications in ophthalmology integrate medicine and engineering, the development of AR in engineering is also critical. The most significant restriction of the ophthalmic AR system is the hardware. The resolution of the image is an unavoidable issue troubling cameras and displays. A 20/20 vision requires a display system with 60 pixels per degree resolution in theory (<xref ref-type="bibr" rid="B74">74</xref>). Most of the research is based on HMDs and smartphones, but these devices can only provide &#x0007E;10&#x02013;12 pixels per degree (<xref ref-type="bibr" rid="B57">57</xref>). Besides, the computing power and volume of the computing units restrict the function and mobility of the AR system. In the beginning, the AR system relied on a PC (<xref ref-type="bibr" rid="B36">36</xref>). As a result, it was too bulky for users to carry. With the rise of HMDs and smartphones, which use embedded computing centers, the mobility of AR systems has been significantly improved, but the small space limits their computing power. Fortunately, in the stage of Industry 4.0, the 5G telecommunication provides a massive capacity for real-time information transmission, which allows real-time cloud computing (<xref ref-type="bibr" rid="B75">75</xref>). The cloud computing method can reduce the volume immensely and increase the computing power (<xref ref-type="bibr" rid="B76">76</xref>). In addition, some HMDs are heavy, thus making them uncomfortable to use for a long time (<xref ref-type="bibr" rid="B77">77</xref>). Furthermore, the battery capacity is limited, restricting the usage time (<xref ref-type="bibr" rid="B77">77</xref>). It would be a hazard for ophthalmic patients if the AR system were to fail unexpectedly. This situation could be prevented by developing battery technology and multi-energy power (<xref ref-type="bibr" rid="B78">78</xref>).</p>
<p>In conclusion, applications of AR in ophthalmology have been implemented in therapy, education, and clinical assistance. However, there is still a large room for development, which needs researchers to pay more effort. Applications in diagnosis and protection might be worth exploring. Although the obstacles of hardware restrict the development of AR in ophthalmology at present, the AR will realize its potential and play an important role in ophthalmology in the future with the rapidly developing technology and more in-depth research.</p></sec>
<sec id="s5">
<title>Author Contributions</title>
<p>HL, TL, and XZ designed the study. TL, CL, and XZ co-wrote the manuscript. WL, YC, and YY discussed and edited the paper. HL supervised this study. All authors discussed the results and commented on the paper.</p></sec>
<sec sec-type="funding-information" id="s6">
<title>Funding</title>
<p>This study was funded by the National Natural Science Foundation of China (81770967 and 81822010). The funders had no role in the study design, interpretation, and writing of the paper.</p></sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p></sec>
<sec sec-type="disclaimer" id="s7">
<title>Publisher&#x00027;s Note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p></sec> </body>
<back><sec sec-type="supplementary-material" id="s8">
<title>Supplementary Material</title>
<p>The Supplementary Material for this article can be found online at: <ext-link ext-link-type="uri" xlink:href="https://www.frontiersin.org/articles/10.3389/fmed.2021.733241/full#supplementary-material">https://www.frontiersin.org/articles/10.3389/fmed.2021.733241/full#supplementary-material</ext-link></p>
<supplementary-material xlink:href="Data_Sheet_1.PDF" id="SM1" mimetype="application/pdf" xmlns:xlink="http://www.w3.org/1999/xlink"/></sec>
<ref-list>
<title>References</title>
<ref id="B1">
<label>1.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carmigniani</surname> <given-names>J</given-names></name> <name><surname>Furht</surname> <given-names>B</given-names></name> <name><surname>Anisetti</surname> <given-names>M</given-names></name> <name><surname>Ceravolo</surname> <given-names>P</given-names></name> <name><surname>Damiani</surname> <given-names>E</given-names></name> <name><surname>Ivkovic</surname> <given-names>M</given-names></name></person-group>. <article-title>Augmented reality technologies, systems and applications</article-title>. <source>Multimed Tools Appl.</source> (<year>2011</year>) <volume>51</volume>:<fpage>341</fpage>&#x02013;<lpage>77</lpage>. <pub-id pub-id-type="doi">10.1007/s11042-010-0660-6</pub-id></citation>
</ref>
<ref id="B2">
<label>2.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Azuma</surname> <given-names>R</given-names></name> <name><surname>Baillot</surname> <given-names>Y</given-names></name> <name><surname>Behringer</surname> <given-names>R</given-names></name> <name><surname>Feiner</surname> <given-names>S</given-names></name> <name><surname>Julier</surname> <given-names>S</given-names></name> <name><surname>MacIntyre</surname> <given-names>B</given-names></name></person-group>. <article-title>Recent advances in augmented reality</article-title>. <source>IEEE Comput Graph Appl.</source> (<year>2001</year>) <volume>21</volume>:<fpage>34</fpage>&#x02013;<lpage>47</lpage>. <pub-id pub-id-type="doi">10.1109/38.963459</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B3">
<label>3.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>X</given-names></name> <name><surname>Ong</surname> <given-names>SK</given-names></name> <name><surname>Nee</surname> <given-names>AYC</given-names></name></person-group>. <article-title>A comprehensive survey of augmented reality assembly research</article-title>. <source>Adv Manufact.</source> (<year>2016</year>) <volume>4</volume>:<fpage>1</fpage>&#x02013;<lpage>22</lpage>. <pub-id pub-id-type="doi">10.1007/s40436-015-0131-4</pub-id></citation>
</ref>
<ref id="B4">
<label>4.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sielhorst</surname> <given-names>T</given-names></name> <name><surname>Feuerstein</surname> <given-names>M</given-names></name> <name><surname>Navab</surname> <given-names>N</given-names></name></person-group>. <article-title>Advanced medical displays: a literature review of augmented reality</article-title>. <source>J Display Technol.</source> (<year>2008</year>) <volume>4</volume>:<fpage>451</fpage>&#x02013;<lpage>67</lpage>. <pub-id pub-id-type="doi">10.1109/JDT.2008.2001575</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B5">
<label>5.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lee</surname> <given-names>K</given-names></name></person-group>. <article-title>Augmented reality in education and training</article-title>. <source>TechTrends.</source> (<year>2012</year>) <volume>56</volume>:<fpage>13</fpage>&#x02013;<lpage>21</lpage>. <pub-id pub-id-type="doi">10.1007/s11528-012-0559-3</pub-id></citation>
</ref>
<ref id="B6">
<label>6.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mahmud</surname> <given-names>N</given-names></name> <name><surname>Cohen</surname> <given-names>J</given-names></name> <name><surname>Tsourides</surname> <given-names>K</given-names></name> <name><surname>Berzin</surname> <given-names>TM</given-names></name></person-group>. <article-title>Computer vision and augmented reality in gastrointestinal endoscopy</article-title>. <source>Gastroenterol Rep.</source> (<year>2015</year>) <volume>3</volume>:<fpage>179</fpage>&#x02013;<lpage>84</lpage>. <pub-id pub-id-type="doi">10.1093/gastro/gov027</pub-id><pub-id pub-id-type="pmid">26133175</pub-id></citation></ref>
<ref id="B7">
<label>7.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>V&#x000E1;vra</surname> <given-names>P</given-names></name> <name><surname>Roman</surname> <given-names>J</given-names></name> <name><surname>Zon&#x0010D;a</surname> <given-names>P</given-names></name> <name><surname>Ihn&#x000E1;t</surname> <given-names>P</given-names></name> <name><surname>N&#x0011B;mec</surname> <given-names>M</given-names></name> <name><surname>Kumar</surname> <given-names>J</given-names></name> <etal/></person-group>. <article-title>Recent development of augmented reality in surgery: a review</article-title>. <source>J Healthc Eng.</source> (<year>2017</year>) <volume>2017</volume>:<fpage>4574172</fpage>. <pub-id pub-id-type="doi">10.1155/2017/4574172</pub-id><pub-id pub-id-type="pmid">29065604</pub-id></citation></ref>
<ref id="B8">
<label>8.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Van Krevelen</surname> <given-names>D</given-names></name> <name><surname>Poelman</surname> <given-names>R</given-names></name></person-group>. <article-title>A survey of augmented reality technologies, applications and limitations</article-title>. <source>Int J Virt Real.</source> (<year>2010</year>) <volume>9</volume>:<fpage>1</fpage>&#x02013;<lpage>20</lpage>. <pub-id pub-id-type="doi">10.20870/IJVR.2010.9.2.2767</pub-id></citation>
</ref>
<ref id="B9">
<label>9.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Manuri</surname> <given-names>F</given-names></name> <name><surname>Sanna</surname> <given-names>A</given-names></name></person-group>. <article-title>A survey on applications of augmented reality</article-title>. <source>ACSIJ Adv Comput Sci Int J.</source> (<year>2016</year>) <volume>5</volume>:<fpage>18</fpage>&#x02013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1109/ICIIP47207.2019.8985779</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B10">
<label>10.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sayed</surname> <given-names>AM</given-names></name> <name><surname>Kashem</surname> <given-names>R</given-names></name> <name><surname>Abdel-Mottaleb</surname> <given-names>M</given-names></name> <name><surname>Roongpoovapatr</surname> <given-names>V</given-names></name> <name><surname>Eleiwa</surname> <given-names>TK</given-names></name> <name><surname>Abdel-Mottaleb</surname> <given-names>M</given-names></name> <etal/></person-group>. <article-title>Toward improving the mobility of patients with peripheral visual field defects with novel digital spectacles</article-title>. <source>Am J Ophthalmol.</source> (<year>2020</year>) <volume>210</volume>:<fpage>136</fpage>&#x02013;<lpage>45</lpage>. <pub-id pub-id-type="doi">10.1016/j.ajo.2019.10.005</pub-id><pub-id pub-id-type="pmid">31606442</pub-id></citation></ref>
<ref id="B11">
<label>11.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hirooka</surname> <given-names>K</given-names></name> <name><surname>Sato</surname> <given-names>S</given-names></name> <name><surname>Nitta</surname> <given-names>E</given-names></name> <name><surname>Tsujikawa</surname> <given-names>A</given-names></name></person-group>. <article-title>The relationship between vision-related quality of life and visual function in glaucoma patients</article-title>. <source>J Glaucoma.</source> (<year>2016</year>) <volume>25</volume>:<fpage>505</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1097/IJG.0000000000000372</pub-id><pub-id pub-id-type="pmid">26766401</pub-id></citation></ref>
<ref id="B12">
<label>12.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ong</surname> <given-names>YH</given-names></name> <name><surname>Jacquin-Courtois</surname> <given-names>S</given-names></name> <name><surname>Gorgoraptis</surname> <given-names>N</given-names></name> <name><surname>Bays</surname> <given-names>PM</given-names></name> <name><surname>Husain</surname> <given-names>M</given-names></name> <name><surname>Leff</surname> <given-names>AP</given-names></name></person-group>. <article-title>Eye-search: a web-based therapy that improves visual search in hemianopia</article-title>. <source>Ann Clin Transl Neurol.</source> (<year>2015</year>) <volume>2</volume>:<fpage>74</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1002/acn3.154</pub-id><pub-id pub-id-type="pmid">25642437</pub-id></citation></ref>
<ref id="B13">
<label>13.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>Y</given-names></name> <name><surname>Szpiro</surname> <given-names>S</given-names></name> <name><surname>Knighten</surname> <given-names>J</given-names></name> <name><surname>Azenkot</surname> <given-names>S</given-names></name></person-group>. <article-title>CueSee: exploring visual cues for people with low vision to facilitate a visual search task</article-title>. In: <source>2016 ACM International Joint Conference on Pervasive and Ubiquitous Computing</source>. <publisher-loc>Heidelberg</publisher-loc> (<year>2016</year>). p. <fpage>73</fpage>&#x02013;<lpage>84</lpage>. <pub-id pub-id-type="doi">10.1145/2971648.2971730</pub-id></citation>
</ref>
<ref id="B14">
<label>14.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dhital</surname> <given-names>A</given-names></name> <name><surname>Pey</surname> <given-names>T</given-names></name> <name><surname>Stanford</surname> <given-names>MR</given-names></name></person-group>. <article-title>Visual loss and falls: a review</article-title>. <source>Eye.</source> (<year>2010</year>) <volume>24</volume>:<fpage>1437</fpage>&#x02013;<lpage>46</lpage>. <pub-id pub-id-type="doi">10.1038/eye.2010.60</pub-id><pub-id pub-id-type="pmid">20448666</pub-id></citation></ref>
<ref id="B15">
<label>15.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>X</given-names></name> <name><surname>Go</surname> <given-names>K</given-names></name> <name><surname>Kashiwagi</surname> <given-names>K</given-names></name> <name><surname>Toyoura</surname> <given-names>M</given-names></name> <name><surname>Mao</surname> <given-names>X</given-names></name> <name><surname>Fujishiro</surname> <given-names>I</given-names></name></person-group>. <article-title>Computational alleviation of homonymous visual field defect with OST-HMD: the effect of size and position of overlaid overview window</article-title>. In: <source>2019 International Conference on Cyberworlds (CW)</source>. <publisher-loc>Kyoto</publisher-loc> (<year>2019</year>). p. <fpage>175</fpage>&#x02013;<lpage>82</lpage>. <pub-id pub-id-type="doi">10.1109/CW.2019.00036</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B16">
<label>16.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Apfelbaum</surname> <given-names>HL</given-names></name> <name><surname>Apfelbaum</surname> <given-names>DH</given-names></name> <name><surname>Woods</surname> <given-names>RL</given-names></name> <name><surname>Peli</surname> <given-names>E</given-names></name></person-group>. <article-title>Inattentional blindness and augmented-vision displays: effects of cartoon-like filtering and attended scene</article-title>. <source>Ophthal Physiol Opt.</source> (<year>2008</year>) <volume>28</volume>:<fpage>204</fpage>&#x02013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.1111/j.1475-1313.2008.00537.x</pub-id><pub-id pub-id-type="pmid">18426419</pub-id></citation></ref>
<ref id="B17">
<label>17.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Younis</surname> <given-names>O</given-names></name> <name><surname>Al-Nuaimy</surname> <given-names>W</given-names></name> <name><surname>Rowe</surname> <given-names>F</given-names></name></person-group>. <article-title>A hazard detection and tracking system for people with peripheral vision loss using smart glasses and augmented reality</article-title>. <source>Int J Adv Comput Sci Appl.</source> (<year>2019</year>) <volume>10</volume>:<fpage>1</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.14569/IJACSA.2019.0100201</pub-id></citation></ref>
<ref id="B18">
<label>18.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sayed</surname> <given-names>AM</given-names></name> <name><surname>Abdel-Mottaleb</surname> <given-names>M</given-names></name> <name><surname>Kashem</surname> <given-names>R</given-names></name> <name><surname>Roongpoovapatr</surname> <given-names>V</given-names></name> <name><surname>Elsawy</surname> <given-names>A</given-names></name> <name><surname>Abdel-Mottaleb</surname> <given-names>M</given-names></name> <etal/></person-group>. <article-title>Expansion of peripheral visual field with novel virtual reality digital spectacles</article-title>. <source>Am J Ophthalmol.</source> (<year>2020</year>) <volume>210</volume>:<fpage>125</fpage>&#x02013;<lpage>35</lpage>. <pub-id pub-id-type="doi">10.1016/j.ajo.2019.10.006</pub-id><pub-id pub-id-type="pmid">31626763</pub-id></citation></ref>
<ref id="B19">
<label>19.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Peli</surname> <given-names>E</given-names></name></person-group>. <article-title>Vision multiplexing: an engineering approach to vision rehabilitation device development</article-title>. <source>Optom Vis Sci.</source> (<year>2001</year>) <volume>78</volume>:<fpage>304</fpage>&#x02013;<lpage>15</lpage>. <pub-id pub-id-type="doi">10.1097/00006324-200105000-00014</pub-id><pub-id pub-id-type="pmid">11384008</pub-id></citation></ref>
<ref id="B20">
<label>20.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Peli</surname> <given-names>E</given-names></name></person-group>. <article-title>Vision multiplexing: an optical engineering concept for low-vision aids</article-title>. In: <source>Proceedings SPIE 6667, Current Developments in Lens Design and Optical Engineering VIII. Vol. 66670C</source>. <publisher-loc>San Diego, CA</publisher-loc> (<year>2007</year>). <pub-id pub-id-type="doi">10.1117/12.729315</pub-id></citation></ref>
<ref id="B21">
<label>21.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ichinose</surname> <given-names>K</given-names></name> <name><surname>Fujishiro</surname> <given-names>I</given-names></name> <name><surname>Kashiwagi</surname> <given-names>K</given-names></name> <name><surname>Mao</surname> <given-names>X</given-names></name> <name><surname>Zhao</surname> <given-names>X</given-names></name> <name><surname>Toyoura</surname> <given-names>M</given-names></name> <etal/></person-group>. <article-title>Visual field loss compensation for homonymous hemianopia patients using edge indicator</article-title>. In: <source>2020 International Conference on Cyberworlds (CW)</source>. <publisher-loc>Caen</publisher-loc> (<year>2020</year>). p. <fpage>79</fpage>&#x02013;<lpage>85</lpage>. <pub-id pub-id-type="doi">10.1109/CW49994.2020.00019</pub-id></citation></ref>
<ref id="B22">
<label>22.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>El Moussawi</surname> <given-names>Z</given-names></name> <name><surname>Boueiri</surname> <given-names>M</given-names></name> <name><surname>Al-Haddad</surname> <given-names>C</given-names></name></person-group>. <article-title>Gene therapy in color vision deficiency: a review</article-title>. <source>Int Ophthalmol.</source> (<year>2021</year>) <volume>41</volume>:<fpage>1917</fpage>&#x02013;<lpage>27</lpage>. <pub-id pub-id-type="doi">10.1007/s10792-021-01717-0</pub-id><pub-id pub-id-type="pmid">33528822</pub-id></citation></ref>
<ref id="B23">
<label>23.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Keene</surname> <given-names>DR</given-names></name></person-group>. <article-title>A review of color blindness for microscopists: guidelines and tools for accommodating and coping with color vision deficiency</article-title>. <source>Microsc Microanal.</source> (<year>2015</year>) <volume>21</volume>:<fpage>279</fpage>&#x02013;<lpage>89</lpage>. <pub-id pub-id-type="doi">10.1017/S1431927615000173</pub-id><pub-id pub-id-type="pmid">25739321</pub-id></citation></ref>
<ref id="B24">
<label>24.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stoianov</surname> <given-names>M</given-names></name> <name><surname>de Oliveira</surname> <given-names>MS</given-names></name> <name><surname>dos Santos Ribeiro</surname> <given-names>MCL</given-names></name> <name><surname>Ferreira</surname> <given-names>MH</given-names></name> <name><surname>de Oliveira Marques</surname> <given-names>I</given-names></name> <name><surname>Gualtieri</surname> <given-names>M</given-names></name></person-group>. <article-title>The impacts of abnormal color vision on people&#x00027;s life: an integrative review</article-title>. <source>Qual Life Res.</source> (<year>2019</year>) <volume>28</volume>:<fpage>855</fpage>&#x02013;<lpage>62</lpage>. <pub-id pub-id-type="doi">10.1007/s11136-018-2030-1</pub-id><pub-id pub-id-type="pmid">30443703</pub-id></citation></ref>
<ref id="B25">
<label>25.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lausegger</surname> <given-names>G</given-names></name> <name><surname>Spitzer</surname> <given-names>M</given-names></name> <name><surname>Ebner</surname> <given-names>M</given-names></name></person-group>. <article-title>OmniColor&#x02013;a smart glasses app to support colorblind people</article-title>. <source>Int J Interact Mobile Technol.</source> (<year>2017</year>) <volume>11</volume>:<fpage>161</fpage>&#x02013;<lpage>77</lpage>. <pub-id pub-id-type="doi">10.3991/ijim.v11i5.6922</pub-id></citation>
</ref>
<ref id="B26">
<label>26.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Tanuwidjaja</surname> <given-names>E</given-names></name> <name><surname>Huynh</surname> <given-names>D</given-names></name> <name><surname>Koa</surname> <given-names>K</given-names></name> <name><surname>Nguyen</surname> <given-names>C</given-names></name> <name><surname>Shao</surname> <given-names>C</given-names></name> <name><surname>Torbett</surname> <given-names>P</given-names></name> <etal/></person-group>. <article-title>Chroma: a wearable augmented-reality solution for color blindness</article-title>. In: <source>2014 ACM International Joint Conference on Pervasive and Ubiquitous Computing</source>. <publisher-loc>Seattle, WA</publisher-loc> (<year>2014</year>). p. <fpage>799</fpage>&#x02013;<lpage>810</lpage>. <pub-id pub-id-type="doi">10.1145/2632048.2632091</pub-id></citation>
</ref>
<ref id="B27">
<label>27.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Langlotz</surname> <given-names>T</given-names></name> <name><surname>Sutton</surname> <given-names>J</given-names></name> <name><surname>Zollmann</surname> <given-names>S</given-names></name> <name><surname>Itoh</surname> <given-names>Y</given-names></name> <name><surname>Regenbrecht</surname> <given-names>H</given-names></name></person-group>. <article-title>Chromaglasses: computational glasses for compensating colour blindness</article-title>. In: <source>Paper Presented at the Proceedings of the 2018 CHI Conference on Human Factors in Computing Systems.</source> <publisher-loc>Montreal, QC</publisher-loc> (<year>2018</year>). p. <fpage>1</fpage>&#x02013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1145/3173574.3173964</pub-id></citation>
</ref>
<ref id="B28">
<label>28.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Popleteev</surname> <given-names>A</given-names></name> <name><surname>Louveton</surname> <given-names>N</given-names></name> <name><surname>McCall</surname> <given-names>R</given-names></name></person-group>. <article-title>Colorizer: smart glasses aid for the colorblind</article-title>. In: <source>Proceedings of the 2015 Workshop on Wearable Systems and Applications</source>. <publisher-loc>Florence</publisher-loc> (<year>2015</year>). p. <fpage>7</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1145/2753509.2753516</pub-id></citation>
</ref>
<ref id="B29">
<label>29.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Schmitt</surname> <given-names>S</given-names></name> <name><surname>Stein</surname> <given-names>S</given-names></name> <name><surname>Hampe</surname> <given-names>F</given-names></name> <name><surname>Paulus</surname> <given-names>D</given-names></name></person-group>. <article-title>Mobile services supporting color vision deficiency</article-title>. In: <source>2012 13th International Conference on Optimization of Electrical and Electronic Equipment (OPTIM)</source>. <publisher-loc>Brasov</publisher-loc> (<year>2012</year>). p. <fpage>1413</fpage>&#x02013;<lpage>20</lpage>. <pub-id pub-id-type="doi">10.1109/OPTIM.2012.6231860</pub-id></citation></ref>
<ref id="B30">
<label>30.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dheeraj</surname> <given-names>K</given-names></name> <name><surname>Jilani</surname> <given-names>SAK</given-names></name> <name><surname>JaveedHussain</surname> <given-names>MS</given-names></name></person-group>. <article-title>Real-time automated guidance system to detect and label color for color blind people using raspberry Pi</article-title>. <source>SSRG Int J Electron Commun Eng.</source> (<year>2015</year>) <volume>2</volume>:<fpage>11</fpage>&#x02013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.14445/23488549/IJECE-V2I11P103</pub-id></citation>
</ref>
<ref id="B31">
<label>31.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Melillo</surname> <given-names>P</given-names></name> <name><surname>Riccio</surname> <given-names>D</given-names></name> <name><surname>Di Perna</surname> <given-names>L</given-names></name> <name><surname>Di Baja</surname> <given-names>GS</given-names></name> <name><surname>De Nino</surname> <given-names>M</given-names></name> <name><surname>Rossi</surname> <given-names>S</given-names></name> <etal/></person-group>. <article-title>Wearable improved vision system for color vision deficiency correction</article-title>. <source>IEEE J Transl Eng Health Med.</source> (<year>2017</year>) <volume>5</volume>:<fpage>1</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1109/JTEHM.2017.2679746</pub-id><pub-id pub-id-type="pmid">28507827</pub-id></citation></ref>
<ref id="B32">
<label>32.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>Y</given-names></name> <name><surname>Zhu</surname> <given-names>Z</given-names></name> <name><surname>Toyoura</surname> <given-names>M</given-names></name> <name><surname>Go</surname> <given-names>K</given-names></name> <name><surname>Kashiwagi</surname> <given-names>K</given-names></name> <name><surname>Fujishiro</surname> <given-names>I</given-names></name> <etal/></person-group>. <article-title>Arriving light control for color vision deficiency compensation using optical see-through head-mounted display</article-title>. In: <source>16th ACM SIGGRAPH International Conference on Virtual-Reality Continuum and its Applications in Industry</source>. <publisher-loc>Tokyo</publisher-loc> (<year>2018</year>). p. <fpage>1</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1145/3284398.3284407</pub-id></citation>
</ref>
<ref id="B33">
<label>33.</label>
<citation citation-type="web"><person-group person-group-type="author"><collab>World Health Organization</collab></person-group>. <source>International Classification of Impairments, Disabilities, and Handicaps: A Manual of Classification Relating to the Consequences of Disease</source>. <publisher-loc>Geneva</publisher-loc>: <publisher-name>World Health Organization</publisher-name> (<year>1980</year>). Available online at: <ext-link ext-link-type="uri" xlink:href="https://apps.who.int/iris/bitstream/handle/10665/41003/9241541261_eng.pdf">https://apps.who.int/iris/bitstream/handle/10665/41003/9241541261_eng.pdf</ext-link> (accessed June 29, 2021).</citation>
</ref>
<ref id="B34">
<label>34.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scholl</surname> <given-names>HP</given-names></name> <name><surname>Strauss</surname> <given-names>RW</given-names></name> <name><surname>Singh</surname> <given-names>MS</given-names></name> <name><surname>Dalkara</surname> <given-names>D</given-names></name> <name><surname>Roska</surname> <given-names>B</given-names></name> <name><surname>Picaud</surname> <given-names>S</given-names></name> <etal/></person-group>. <article-title>Emerging therapies for inherited retinal degeneration</article-title>. <source>Sci Transl Med.</source> (<year>2016</year>) <volume>8</volume>:<fpage>368rv6</fpage>. <pub-id pub-id-type="doi">10.1126/scitranslmed.aaf2838</pub-id><pub-id pub-id-type="pmid">27928030</pub-id></citation></ref>
<ref id="B35">
<label>35.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Angelopoulos</surname> <given-names>AN</given-names></name> <name><surname>Ameri</surname> <given-names>H</given-names></name> <name><surname>Mitra</surname> <given-names>D</given-names></name> <name><surname>Humayun</surname> <given-names>M</given-names></name></person-group>. <article-title>Enhanced depth navigation through augmented reality depth mapping in patients with low vision</article-title>. <source>Sci Rep.</source> (<year>2019</year>) <volume>9</volume>:<fpage>11230</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-019-47397-w</pub-id><pub-id pub-id-type="pmid">31375713</pub-id></citation></ref>
<ref id="B36">
<label>36.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hicks</surname> <given-names>SL</given-names></name> <name><surname>Wilson</surname> <given-names>I</given-names></name> <name><surname>Muhammed</surname> <given-names>L</given-names></name> <name><surname>Worsfold</surname> <given-names>J</given-names></name> <name><surname>Downes</surname> <given-names>SM</given-names></name> <name><surname>Kennard</surname> <given-names>C</given-names></name></person-group>. <article-title>A depth-based head-mounted visual display to aid navigation in partially sighted individuals</article-title>. <source>PLoS ONE.</source> (<year>2013</year>) <volume>8</volume>:<fpage>e67695</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0067695</pub-id><pub-id pub-id-type="pmid">23844067</pub-id></citation></ref>
<ref id="B37">
<label>37.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kinateder</surname> <given-names>M</given-names></name> <name><surname>Gualtieri</surname> <given-names>J</given-names></name> <name><surname>Dunn</surname> <given-names>MJ</given-names></name> <name><surname>Jarosz</surname> <given-names>W</given-names></name> <name><surname>Yang</surname> <given-names>XD</given-names></name> <name><surname>Cooper</surname> <given-names>EA</given-names></name></person-group>. <article-title>Using an augmented reality device as a distance-based vision aid&#x02014;promise and limitations</article-title>. <source>Optom Vis Sci.</source> (<year>2018</year>) <volume>95</volume>:<fpage>727</fpage>&#x02013;<lpage>37</lpage>. <pub-id pub-id-type="doi">10.1097/OPX.0000000000001232</pub-id><pub-id pub-id-type="pmid">29877901</pub-id></citation></ref>
<ref id="B38">
<label>38.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>Y</given-names></name> <name><surname>Stiles</surname> <given-names>NR</given-names></name> <name><surname>Meister</surname> <given-names>M</given-names></name></person-group>. <article-title>Augmented reality powers a cognitive assistant for the blind</article-title>. <source>Elife.</source> (<year>2018</year>) <volume>7</volume>:<fpage>e37841</fpage>. <pub-id pub-id-type="doi">10.7554/eLife.37841</pub-id><pub-id pub-id-type="pmid">30479270</pub-id></citation></ref>
<ref id="B39">
<label>39.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>Y</given-names></name> <name><surname>Szpiro</surname> <given-names>S</given-names></name> <name><surname>Azenkot</surname> <given-names>S</given-names></name></person-group>. <article-title>Foresee: a customizable head-mounted vision enhancement system for people with low vision</article-title>. In: <source>17th International ACM SIGACCESS Conference on Computers and Accessibility</source>. <publisher-loc>Lisbon</publisher-loc> (<year>2015</year>). p. <fpage>239</fpage>&#x02013;<lpage>49</lpage>. <pub-id pub-id-type="doi">10.1145/2700648.2809865</pub-id></citation>
</ref>
<ref id="B40">
<label>40.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Bakshi</surname> <given-names>AM</given-names></name> <name><surname>Simson</surname> <given-names>J</given-names></name> <name><surname>de Castro</surname> <given-names>C</given-names></name> <name><surname>Yu</surname> <given-names>CC</given-names></name> <name><surname>Dias</surname> <given-names>A</given-names></name></person-group>. <article-title>Bright: an augmented reality assistive platform for visual impairment</article-title>. In: <source>The 2019 IEEE Games, Entertainment, Media Conference (GEM)</source>. <publisher-loc>New Haven, CT</publisher-loc> (<year>2019</year>). p. <fpage>1</fpage>&#x02013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.1109/GEM.2019.8811556</pub-id></citation></ref>
<ref id="B41">
<label>41.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Gon&#x000E7;alves</surname> <given-names>P</given-names></name> <name><surname>Orlosky</surname> <given-names>J</given-names></name> <name><surname>Machulla</surname> <given-names>TK</given-names></name></person-group>. <article-title>An augmented reality assistant to support button selection for patients with age-related macular degeneration</article-title>. In: <source>2020 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)</source>. <publisher-loc>Atlanta, GA</publisher-loc> (<year>2020</year>) p. <fpage>730</fpage>&#x02013;<lpage>1</lpage>. <pub-id pub-id-type="doi">10.1109/VRW50115.2020.00216</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B42">
<label>42.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Min Htike</surname> <given-names>HH</given-names></name> <name><surname>Margrain</surname> <given-names>T</given-names></name> <name><surname>Lai</surname> <given-names>YK</given-names></name> <name><surname>Eslambolchilar</surname> <given-names>P</given-names></name></person-group>. <article-title>Augmented reality glasses as an orientation and mobility aid for people with low vision: a feasibility study of experiences and requirements</article-title>. In: <source>2021 CHI Conference on Human Factors in Computing Systems</source>. <publisher-loc>Yokohama</publisher-loc> (<year>2021</year>). p. <fpage>1</fpage>&#x02013;<lpage>15</lpage>. <pub-id pub-id-type="doi">10.1145/3411764.3445327</pub-id></citation>
</ref>
<ref id="B43">
<label>43.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Stearns</surname> <given-names>L</given-names></name> <name><surname>Findlater</surname> <given-names>L</given-names></name> <name><surname>Froehlich</surname> <given-names>JE</given-names></name></person-group>. <article-title>Design of an augmented reality magnification aid for low vision users</article-title>. In: <source>20th International ACM SIGACCESS Conference on Computers and Accessibility</source>. <publisher-loc>Galway</publisher-loc> (<year>2018</year>). p. <fpage>28</fpage>&#x02013;<lpage>39</lpage>. <pub-id pub-id-type="doi">10.1145/3234695.3236361</pub-id></citation>
</ref>
<ref id="B44">
<label>44.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hwang</surname> <given-names>AD</given-names></name> <name><surname>Peli</surname> <given-names>E</given-names></name></person-group>. <article-title>An augmented-reality edge enhancement application for Google Glass</article-title>. <source>Optom Vis Sci.</source> (<year>2014</year>) <volume>91</volume>:<fpage>1021</fpage>. <pub-id pub-id-type="doi">10.1097/OPX.0000000000000326</pub-id><pub-id pub-id-type="pmid">24978871</pub-id></citation></ref>
<ref id="B45">
<label>45.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lang</surname> <given-names>F</given-names></name> <name><surname>Schmidt</surname> <given-names>A</given-names></name> <name><surname>Machulla</surname> <given-names>T</given-names></name></person-group>. <article-title>Augmented reality for people with low vision: symbolic and alphanumeric representation of information</article-title>. In: <source>The International Conference on Computers Helping People with Special Needs</source>. <publisher-loc>Lecco</publisher-loc> (<year>2020</year>). p. <fpage>146</fpage>&#x02013;<lpage>56</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-58796-3_19</pub-id></citation>
</ref>
<ref id="B46">
<label>46.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>J</given-names></name> <name><surname>Kinateder</surname> <given-names>M</given-names></name> <name><surname>Dunn</surname> <given-names>MJ</given-names></name> <name><surname>Jarosz</surname> <given-names>W</given-names></name> <name><surname>Yang</surname> <given-names>XD</given-names></name> <name><surname>Cooper</surname> <given-names>EA</given-names></name></person-group>. <article-title>An augmented reality sign-reading assistant for users with reduced vision</article-title>. <source>PLoS ONE.</source> (<year>2019</year>) <volume>14</volume>:<fpage>e0210630</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0210630</pub-id><pub-id pub-id-type="pmid">30650159</pub-id></citation></ref>
<ref id="B47">
<label>47.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Almutairi</surname> <given-names>F</given-names></name> <name><surname>Almeshari</surname> <given-names>N</given-names></name> <name><surname>Ahmad</surname> <given-names>K</given-names></name> <name><surname>Magliyah</surname> <given-names>MS</given-names></name> <name><surname>Schatz</surname> <given-names>P</given-names></name></person-group>. <article-title>Congenital stationary night blindness: an update and review of the disease spectrum in Saudi Arabia</article-title>. <source>Acta Ophthalmol</source>. (<year>2020</year>) <volume>99</volume>:<fpage>581</fpage>&#x02013;<lpage>91</lpage>. <pub-id pub-id-type="doi">10.1111/aos.14693</pub-id><pub-id pub-id-type="pmid">33369259</pub-id></citation></ref>
<ref id="B48">
<label>48.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>C</given-names></name> <name><surname>Zhai</surname> <given-names>G</given-names></name> <name><surname>Li</surname> <given-names>D</given-names></name></person-group>. <article-title>An augmented-reality night vision enhancement application for see-through glasses</article-title>. In: <source>2015 IEEE International Conference on Multimedia and Expo Workshops (ICMEW)</source>. <publisher-loc>Turin</publisher-loc> (<year>2015</year>). p. <fpage>1</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1109/ICMEW.2015.7169860</pub-id></citation></ref>
<ref id="B49">
<label>49.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Nowak</surname> <given-names>A</given-names></name> <name><surname>Wozniak</surname> <given-names>M</given-names></name> <name><surname>Pieprzowski</surname> <given-names>M</given-names></name> <name><surname>Romanowski</surname> <given-names>A</given-names></name></person-group>. <article-title>Towards amblyopia therapy using mixed reality technology</article-title>. In: <source>Paper presented at the 2018 Federated Conference on Computer Science and Information Systems (FedCSIS)</source>. <publisher-loc>Poznan</publisher-loc> (<year>2018</year>). p. <fpage>279</fpage>&#x02013;<lpage>82</lpage>. <pub-id pub-id-type="doi">10.15439/2018F335</pub-id></citation>
</ref>
<ref id="B50">
<label>50.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Fernandez</surname> <given-names>A</given-names></name> <name><surname>Fernandez</surname> <given-names>P</given-names></name> <name><surname>L&#x000F3;pez</surname> <given-names>G</given-names></name> <name><surname>Calder&#x000F3;n</surname> <given-names>M</given-names></name> <name><surname>Guerrero</surname> <given-names>LA</given-names></name></person-group>. <article-title>Troyoculus: an augmented reality system to improve reading capabilities of night-blind people</article-title>. In: <source>The International Work-Conference on Ambient Assisted Living</source>. <publisher-loc>Puerto Varas</publisher-loc> (<year>2015</year>). p. <fpage>16</fpage>&#x02013;<lpage>28</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-319-26410-3_3</pub-id></citation>
</ref>
<ref id="B51">
<label>51.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Midena</surname> <given-names>E</given-names></name> <name><surname>Vujosevic</surname> <given-names>S</given-names></name></person-group>. <article-title>Metamorphopsia: an overlooked visual symptom</article-title>. <source>Ophthalmic Res.</source> (<year>2016</year>) <volume>55</volume>:<fpage>26</fpage>&#x02013;<lpage>36</lpage>. <pub-id pub-id-type="doi">10.1159/000441033</pub-id><pub-id pub-id-type="pmid">26554918</pub-id></citation></ref>
<ref id="B52">
<label>52.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Bozzelli</surname> <given-names>G</given-names></name> <name><surname>De Nino</surname> <given-names>M</given-names></name> <name><surname>Pero</surname> <given-names>C</given-names></name> <name><surname>Ricciardi</surname> <given-names>S</given-names></name></person-group>. <article-title>AR based user adaptive compensation of metamorphopsia</article-title>. In: <source>Paper Presented at the Proceedings of the International Conference on Advanced Visual Interfaces</source>. <publisher-loc>Salerno</publisher-loc> (<year>2020</year>). p. <fpage>1</fpage>&#x02013;<lpage>5</lpage>. <pub-id pub-id-type="doi">10.1145/3399715.3399929</pub-id></citation>
</ref>
<ref id="B53">
<label>53.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Maurer</surname> <given-names>D</given-names></name> <name><surname>McKee</surname> <given-names>SP</given-names></name></person-group>. <article-title>Classification and diversity of amblyopia</article-title>. <source>Vis Neurosci.</source> (<year>2018</year>) <volume>35</volume>:<fpage>E012</fpage>. <pub-id pub-id-type="doi">10.1017/s0952523817000190</pub-id><pub-id pub-id-type="pmid">29905124</pub-id></citation></ref>
<ref id="B54">
<label>54.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Schuppe</surname> <given-names>O</given-names></name> <name><surname>Wagner</surname> <given-names>C</given-names></name> <name><surname>Koch</surname> <given-names>F</given-names></name> <name><surname>Manner</surname> <given-names>R</given-names></name></person-group>. <article-title>EYESi ophthalmoscope&#x02013;a simulator for indirect ophthalmoscopic examinations</article-title>. <source>Stud Health Technol Inform.</source> (<year>2009</year>) <volume>142</volume>:<fpage>295</fpage>&#x02013;<lpage>300</lpage>. <pub-id pub-id-type="doi">10.3233/978-1-58603-964-6-295</pub-id></citation>
</ref>
<ref id="B55">
<label>55.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Acosta</surname> <given-names>D</given-names></name> <name><surname>Gu</surname> <given-names>D</given-names></name> <name><surname>Uribe-Quevedo</surname> <given-names>A</given-names></name> <name><surname>Kanev</surname> <given-names>K</given-names></name> <name><surname>Jenkin</surname> <given-names>M</given-names></name> <name><surname>Kapralos</surname> <given-names>B</given-names></name> <etal/></person-group>. <article-title>Mobile e-training tools for augmented reality eye fundus examination</article-title>. In: <source>Interactive Mobile Communication, Technologies and Learning</source>. <publisher-loc>Hamilton, ON</publisher-loc> (<year>2018</year>). p. <fpage>83</fpage>&#x02013;<lpage>92</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-11434-3_13</pub-id></citation>
</ref>
<ref id="B56">
<label>56.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ropelato</surname> <given-names>S</given-names></name> <name><surname>Menozzi</surname> <given-names>M</given-names></name> <name><surname>Michel</surname> <given-names>D</given-names></name> <name><surname>Siegrist</surname> <given-names>M</given-names></name></person-group>. <article-title>Augmented reality microsurgery: a tool for training micromanipulations in ophthalmic surgery using augmented reality</article-title>. <source>Simul Healthc.</source> (<year>2020</year>) <volume>15</volume>:<fpage>122</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.1097/SIH.0000000000000413</pub-id><pub-id pub-id-type="pmid">32044852</pub-id></citation></ref>
<ref id="B57">
<label>57.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Huang</surname> <given-names>YH</given-names></name> <name><surname>Chang</surname> <given-names>HY</given-names></name> <name><surname>Yang</surname> <given-names>WL</given-names></name> <name><surname>Chiu</surname> <given-names>YK</given-names></name> <name><surname>Yu</surname> <given-names>TC</given-names></name> <name><surname>Tsai</surname> <given-names>PH</given-names></name> <etal/></person-group>. <article-title>CatAR: a novel stereoscopic augmented reality cataract surgery training system with dexterous instruments tracking technology</article-title>. In: <source>2018 CHI Conference on Human Factors in Computing Systems</source>. <publisher-loc>Montreal, QC</publisher-loc> (<year>2018</year>). p. <fpage>1</fpage>&#x02013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1145/3173574.3174039</pub-id></citation>
</ref>
<ref id="B58">
<label>58.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Weiss</surname> <given-names>RS</given-names></name> <name><surname>Park</surname> <given-names>S</given-names></name></person-group>. <article-title>Recent updates on myopia control: preventing progression 1 diopter at a time</article-title>. <source>Curr Opin Ophthalmol.</source> (<year>2019</year>) <volume>30</volume>:<fpage>215</fpage>&#x02013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1097/ICU.0000000000000571</pub-id><pub-id pub-id-type="pmid">31033732</pub-id></citation></ref>
<ref id="B59">
<label>59.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Williams</surname> <given-names>AM</given-names></name> <name><surname>Muir</surname> <given-names>KW</given-names></name> <name><surname>Rosdahl</surname> <given-names>JA</given-names></name></person-group>. <article-title>Readability of patient education materials in ophthalmology: a single-institution study and systematic review</article-title>. <source>BMC Ophthalmol.</source> (<year>2016</year>) <volume>16</volume>:<fpage>133</fpage>. <pub-id pub-id-type="doi">10.1186/s12886-016-0315-0</pub-id><pub-id pub-id-type="pmid">27487960</pub-id></citation></ref>
<ref id="B60">
<label>60.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Ates</surname> <given-names>HC</given-names></name> <name><surname>Fiannaca</surname> <given-names>A</given-names></name> <name><surname>Folmer</surname> <given-names>E</given-names></name></person-group>. <article-title>Immersive simulation of visual impairments using a wearable see-through display</article-title>. In: <source>The 9th International Conference on Tangible, Embedded, and Embodied Interaction</source>. <publisher-loc>Stanford, CA</publisher-loc> (<year>2015</year>). p. <fpage>225</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1145/2677199.2680551</pub-id></citation>
</ref>
<ref id="B61">
<label>61.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Jones</surname> <given-names>PR</given-names></name> <name><surname>Ometto</surname> <given-names>G</given-names></name></person-group>. <article-title>Degraded reality: using VR/AR to simulate visual impairments</article-title>. In: <source>2018 IEEE Workshop on Augmented and Virtual Realities for Good (VAR4Good)</source>. <publisher-loc>Reutlingen</publisher-loc> (<year>2018</year>). p. <fpage>1</fpage>&#x02013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.1109/VAR4GOOD.2018.8576885</pub-id></citation></ref>
<ref id="B62">
<label>62.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Jones</surname> <given-names>PR</given-names></name> <name><surname>Somoske&#x000F6;y</surname> <given-names>T</given-names></name> <name><surname>Chow-Wing-Bom</surname> <given-names>H</given-names></name> <name><surname>Crabb</surname> <given-names>DP</given-names></name></person-group>. <article-title>Seeing other perspectives: evaluating the use of virtual and augmented reality to simulate visual impairments (OpenVisSim)</article-title>. <source>npj Digit Med.</source> (<year>2020</year>) <volume>3</volume>:<fpage>32</fpage>. <pub-id pub-id-type="doi">10.1038/s41746-020-0242-6</pub-id><pub-id pub-id-type="pmid">32195367</pub-id></citation></ref>
<ref id="B63">
<label>63.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Kr&#x000F6;sl</surname> <given-names>K</given-names></name> <name><surname>Elvezio</surname> <given-names>C</given-names></name> <name><surname>Luidolt</surname> <given-names>LR</given-names></name> <name><surname>H&#x000FC;rbe</surname> <given-names>M</given-names></name> <name><surname>Karst</surname> <given-names>S</given-names></name> <name><surname>Feiner</surname> <given-names>S</given-names></name> <etal/></person-group>. <article-title>CatARact: simulating cataracts in augmented reality</article-title>. In: <source>2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)</source>. <publisher-loc>Porto de Galinhas</publisher-loc> (<year>2020</year>). p. <fpage>682</fpage>&#x02013;<lpage>93</lpage>. <pub-id pub-id-type="doi">10.1109/ISMAR50242.2020.00098</pub-id><pub-id pub-id-type="pmid">27295638</pub-id></citation></ref>
<ref id="B64">
<label>64.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Berger</surname> <given-names>JW</given-names></name> <name><surname>Madjarov</surname> <given-names>B</given-names></name></person-group>. <article-title>Augmented reality fundus biomicroscopy: a working clinical prototype</article-title>. <source>Archiv Ophthalmol.</source> (<year>2001</year>) <volume>119</volume>:<fpage>1815</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1001/archopht.119.12.1815</pub-id><pub-id pub-id-type="pmid">11735793</pub-id></citation></ref>
<ref id="B65">
<label>65.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Devalla</surname> <given-names>SK</given-names></name> <name><surname>Liang</surname> <given-names>Z</given-names></name> <name><surname>Pham</surname> <given-names>TH</given-names></name> <name><surname>Boote</surname> <given-names>C</given-names></name> <name><surname>Strouthidis</surname> <given-names>NG</given-names></name> <name><surname>Thiery</surname> <given-names>AH</given-names></name> <etal/></person-group>. <article-title>Glaucoma management in the era of artificial intelligence</article-title>. <source>Br J Ophthalmol.</source> (<year>2020</year>) <volume>104</volume>:<fpage>301</fpage>&#x02013;<lpage>11</lpage>. <pub-id pub-id-type="doi">10.1136/bjophthalmol-2019-315016</pub-id><pub-id pub-id-type="pmid">31640973</pub-id></citation></ref>
<ref id="B66">
<label>66.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>JJ</given-names></name></person-group>. <article-title>Optical coherence tomography and neuro-ophthalmology</article-title>. <source>J Neuro Ophthalmol.</source> (<year>2018</year>) <volume>38</volume>:<fpage>e5</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.1097/WNO.0000000000000505</pub-id><pub-id pub-id-type="pmid">28266953</pub-id></citation></ref>
<ref id="B67">
<label>67.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Roodaki</surname> <given-names>H</given-names></name> <name><surname>Filippatos</surname> <given-names>K</given-names></name> <name><surname>Eslami</surname> <given-names>A</given-names></name> <name><surname>Navab</surname> <given-names>N</given-names></name></person-group>. <article-title>Introducing augmented reality to optical coherence tomography in ophthalmic microsurgery</article-title>. In: <source>2015 IEEE International Symposium on Mixed and Augmented Reality</source>. <publisher-loc>Fukuoka</publisher-loc> (<year>2015</year>). p. <fpage>1</fpage>&#x02013;<lpage>6</lpage>. <pub-id pub-id-type="doi">10.1109/ISMAR.2015.15</pub-id></citation></ref>
<ref id="B68">
<label>68.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tang</surname> <given-names>N</given-names></name> <name><surname>Fan</surname> <given-names>J</given-names></name> <name><surname>Wang</surname> <given-names>P</given-names></name> <name><surname>Shi</surname> <given-names>G</given-names></name></person-group>. <article-title>Microscope integrated optical coherence tomography system combined with augmented reality</article-title>. <source>Opt Express.</source> (<year>2021</year>) <volume>29</volume>:<fpage>9407</fpage>&#x02013;<lpage>18</lpage>. <pub-id pub-id-type="doi">10.1364/OE.420375</pub-id><pub-id pub-id-type="pmid">33820369</pub-id></citation></ref>
<ref id="B69">
<label>69.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pan</surname> <given-names>J</given-names></name> <name><surname>Liu</surname> <given-names>W</given-names></name> <name><surname>Ge</surname> <given-names>P</given-names></name> <name><surname>Li</surname> <given-names>F</given-names></name> <name><surname>Shi</surname> <given-names>W</given-names></name> <name><surname>Jia</surname> <given-names>L</given-names></name> <etal/></person-group>. <article-title>Real-time segmentation and tracking of excised corneal contour by deep neural networks for DALK surgical navigation</article-title>. <source>Comput Methods Programs Biomed.</source> (<year>2020</year>) <volume>197</volume>:<fpage>105679</fpage>. <pub-id pub-id-type="doi">10.1016/j.cmpb.2020.105679</pub-id><pub-id pub-id-type="pmid">32814253</pub-id></citation></ref>
<ref id="B70">
<label>70.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ong</surname> <given-names>CW</given-names></name> <name><surname>Tan</surname> <given-names>MCJ</given-names></name> <name><surname>Lam</surname> <given-names>M</given-names></name> <name><surname>Koh</surname> <given-names>VTC</given-names></name></person-group>. <article-title>Applications of extended reality in ophthalmology: systematic review</article-title>. <source>J Med Internet Res.</source> (<year>2021</year>) <volume>23</volume>:<fpage>e24152</fpage>. <pub-id pub-id-type="doi">10.2196/24152</pub-id><pub-id pub-id-type="pmid">34420929</pub-id></citation></ref>
<ref id="B71">
<label>71.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rosenfeld</surname> <given-names>JV</given-names></name> <name><surname>Wong</surname> <given-names>YT</given-names></name></person-group>. <article-title>Neurobionics and the brain-computer interface: current applications and future horizons</article-title>. <source>Med J Aust.</source> (<year>2017</year>) <volume>206</volume>:<fpage>363</fpage>&#x02013;<lpage>8</lpage>. <pub-id pub-id-type="doi">10.5694/mja16.01011</pub-id><pub-id pub-id-type="pmid">28446119</pub-id></citation></ref>
<ref id="B72">
<label>72.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Fanlo Zarazaga</surname> <given-names>A</given-names></name> <name><surname>Guti&#x000E9;rrez V&#x000E1;squez</surname> <given-names>J</given-names></name> <name><surname>Pueyo Royo</surname> <given-names>V</given-names></name></person-group>. <article-title>Review of the main colour vision clinical assessment tests</article-title>. <source>Arch Soc Esp Oftalmol.</source> (<year>2019</year>) <volume>94</volume>:<fpage>25</fpage>&#x02013;<lpage>32</lpage>. <pub-id pub-id-type="doi">10.1016/j.oftal.2018.08.006</pub-id><pub-id pub-id-type="pmid">30361001</pub-id></citation></ref>
<ref id="B73">
<label>73.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aydindogan</surname> <given-names>G</given-names></name> <name><surname>Kavakli</surname> <given-names>K</given-names></name> <name><surname>Sahin</surname> <given-names>A</given-names></name> <name><surname>Artal</surname> <given-names>P</given-names></name> <name><surname>&#x000DC;rey</surname> <given-names>H</given-names></name></person-group>. <article-title>Applications of augmented reality in ophthalmology [invited]</article-title>. <source>Biomed Opt Express</source>. (<year>2021</year>) <volume>12</volume>:<fpage>511</fpage>&#x02013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.1364/boe.405026</pub-id><pub-id pub-id-type="pmid">33659087</pub-id></citation></ref>
<ref id="B74">
<label>74.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Atchison</surname> <given-names>DA</given-names></name></person-group>. <article-title>Optics of the human eye</article-title>. In: Guenther BD, Steel DG, editors. <source>Encyclopedia of Modern Optics</source>. 2nd ed. <volume>Vol. 5</volume>. <publisher-loc>Amsterdam</publisher-loc>: <publisher-name>Elsevier</publisher-name> (<year>2018</year>). p. <fpage>43</fpage>&#x02013;<lpage>63</lpage>.</citation>
</ref>
<ref id="B75">
<label>75.</label>
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Shorgin</surname> <given-names>S</given-names></name> <name><surname>Samouylov</surname> <given-names>K</given-names></name> <name><surname>Gudkova</surname> <given-names>I</given-names></name> <name><surname>Galinina</surname> <given-names>O</given-names></name> <name><surname>Andreev</surname> <given-names>S</given-names></name></person-group>. <article-title>On the benefits of 5G wireless technology for future mobile cloud computing</article-title>. In: <source>2014 International Science and Technology Conference (Modern Networking Technologies) (MoNeTeC)</source>. <publisher-loc>Moscow</publisher-loc> (<year>2014</year>). p. <fpage>1</fpage>&#x02013;<lpage>4</lpage>. <pub-id pub-id-type="doi">10.1109/MoNeTeC.2014.6995601</pub-id></citation></ref>
<ref id="B76">
<label>76.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>De Pace</surname> <given-names>F</given-names></name> <name><surname>Manuri</surname> <given-names>F</given-names></name> <name><surname>Sanna</surname> <given-names>A</given-names></name></person-group>. <article-title>Augmented reality in industry 4.0</article-title>. <source>Am J Comput Sci Inform Technol.</source> (<year>2018</year>) <volume>6</volume>:<fpage>1</fpage>&#x02013;<lpage>7</lpage>. <pub-id pub-id-type="doi">10.21767/2349-3917.100017</pub-id></citation>
</ref>
<ref id="B77">
<label>77.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Salih</surname> <given-names>AE</given-names></name> <name><surname>Elsherif</surname> <given-names>M</given-names></name> <name><surname>Ali</surname> <given-names>M</given-names></name> <name><surname>Vahdati</surname> <given-names>N</given-names></name> <name><surname>Yetisen</surname> <given-names>AK</given-names></name> <name><surname>Butt</surname> <given-names>H</given-names></name></person-group>. <article-title>Ophthalmic wearable devices for color blindness management</article-title>. <source>Adv Mater Technol.</source> (<year>2020</year>) <volume>5</volume>:<fpage>1901134</fpage>. <pub-id pub-id-type="doi">10.1002/admt.201901134</pub-id><pub-id pub-id-type="pmid">25855820</pub-id></citation></ref>
<ref id="B78">
<label>78.</label>
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>S</given-names></name> <name><surname>Tan</surname> <given-names>H</given-names></name> <name><surname>Rui</surname> <given-names>X</given-names></name> <name><surname>Yu</surname> <given-names>Y</given-names></name></person-group>. <article-title>Vanadium-based materials: next generation electrodes powering the battery revolution?</article-title> <source>Acc Chem Res.</source> (<year>2020</year>) <volume>53</volume>:<fpage>1660</fpage>&#x02013;<lpage>71</lpage>. <pub-id pub-id-type="doi">10.1021/acs.accounts.0c00362</pub-id><pub-id pub-id-type="pmid">32709195</pub-id></citation></ref>
</ref-list>
<glossary>
<def-list>
<title>Abbreviations</title>
<def-item><term>AR</term>
<def><p>augmented reality</p></def></def-item>
<def-item><term>CVD</term>
<def><p>color vision deficiency</p></def></def-item>
<def-item><term>HMD</term>
<def><p>head-mounted displays</p></def></def-item>
<def-item><term>OCT</term>
<def><p>optical coherence tomography</p></def></def-item>
<def-item><term>OST</term>
<def><p>optical see-through</p></def></def-item>
<def-item><term>VFD</term>
<def><p>visual field defects</p></def></def-item>
<def-item><term>VST</term>
<def><p>video see-through</p></def></def-item>
</def-list>
</glossary>
<fn-group>
<fn id="fn0001"><p><sup>1</sup><italic>International Classification of Diseases</italic>. Available online at: <ext-link ext-link-type="uri" xlink:href="https://icd.who.int/browse10/2019/en&#x00023;/H53-H54">https://icd.who.int/browse10/2019/en&#x00023;/H53-H54</ext-link></p></fn>
</fn-group>
</back>
</article>