<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Hum. Neurosci.</journal-id>
<journal-title>Frontiers in Human Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Hum. Neurosci.</abbrev-journal-title>
<issn pub-type="epub">1662-5161</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fnhum.2019.00374</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Human Neuroscience</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Neural Networks Supporting Phoneme Monitoring Are Modulated by Phonology but Not Lexicality or Iconicity: Evidence From British and Swedish Sign Language</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name><surname>Rudner</surname> <given-names>Mary</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/39975/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Orfanidou</surname> <given-names>Eleni</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>K&#x000E4;stner</surname> <given-names>Lena</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/265666/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Cardin</surname> <given-names>Velia</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/784167/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Woll</surname> <given-names>Bencie</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/36082/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Capek</surname> <given-names>Cheryl M.</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/113393/overview"/>
</contrib> 
<contrib contrib-type="author">
<name><surname>R&#x000F6;nnberg</surname> <given-names>Jerker</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/75267/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Linnaeus Centre HEAD, Swedish Institute for Disability Research, Department of Behavioural Sciences and Learning, Link&#x000F6;ping University</institution>, <addr-line>Link&#x000F6;ping</addr-line>, <country>Sweden</country></aff>
<aff id="aff2"><sup>2</sup><institution>Deafness, Cognition and Language Research Centre, Department of Experimental Psychology, University College London</institution>, <addr-line>London</addr-line>, <country>United Kingdom</country></aff>
<aff id="aff3"><sup>3</sup><institution>School of Psychology, University of Crete</institution>, <addr-line>Rethymno</addr-line>, <country>Greece</country></aff>
<aff id="aff4"><sup>4</sup><institution>Department of Philosophy, Saarland University</institution>, <addr-line>Saarbr&#x000FC;cken</addr-line>, <country>Germany</country></aff>
<aff id="aff5"><sup>5</sup><institution>School of Psychology, University of East Anglia</institution>, <addr-line>Norwich</addr-line>, <country>United Kingdom</country></aff>
<aff id="aff6"><sup>6</sup><institution>Division of Neuroscience &#x00026; Experimental Psychology, School of Biological Sciences, University of Manchester</institution>, <addr-line>Manchester</addr-line>, <country>United Kingdom</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Tamer Demiralp, Istanbul University, Turkey</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Diane Brentari, University of Chicago, United States; Cristina Baus, Pompeu Fabra University, Spain</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Mary Rudner <email>mary.rudner&#x00040;liu.se</email></corresp>
<fn fn-type="other" id="fn001"><p><bold>Specialty section:</bold> This article was submitted to Speech and Language, a section of the journal Frontiers in Human Neuroscience</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>22</day>
<month>10</month>
<year>2019</year>
</pub-date>
<pub-date pub-type="collection">
<year>2019</year>
</pub-date>
<volume>13</volume>
<elocation-id>374</elocation-id>
<history>
<date date-type="received">
<day>27</day>
<month>12</month>
<year>2018</year>
</date>
<date date-type="accepted">
<day>03</day>
<month>10</month>
<year>2019</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2019 Rudner, Orfanidou, K&#x000E4;stner, Cardin, Woll, Capek and R&#x000F6;nnberg.</copyright-statement>
<copyright-year>2019</copyright-year>
<copyright-holder>Rudner, Orfanidou, K&#x000E4;stner, Cardin, Woll, Capek and R&#x000F6;nnberg</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract><p>Sign languages are natural languages in the visual domain. Because they lack a written form, they provide a sharper tool than spoken languages for investigating lexicality effects which may be confounded by orthographic processing. In a previous study, we showed that the neural networks supporting phoneme monitoring in deaf British Sign Language (BSL) users are modulated by phonology but not lexicality or iconicity. In the present study, we investigated whether this pattern generalizes to deaf Swedish Sign Language (SSL) users. British and Swedish Sign Languages have a largely overlapping phoneme inventory but are mutually unintelligible because lexical overlap is small. This is important because it means that even when signs lexicalized in BSL are unintelligible to users of SSL they are usually still phonologically acceptable. During fMRI scanning, deaf users of the two different sign languages monitored signs that were lexicalized in either one or both of those languages for phonologically contrastive elements. Neural activation patterns relating to different linguistic levels of processing were similar across SLs; in particular, we found no effect of lexicality, supporting the notion that apparent lexicality effects on sublexical processing of speech may be driven by orthographic strategies. As expected, we found an effect of phonology but not iconicity. Further, there was a difference in neural activation between the two groups in a motion-processing region of the left occipital cortex, possibly driven by cultural differences, such as education. Importantly, this difference was not modulated by the linguistic characteristics of the material, underscoring the robustness of the neural activation patterns relating to different linguistic levels of processing.</p></abstract>
<kwd-group>
<kwd>sign language</kwd>
<kwd>lexicality</kwd>
<kwd>iconicity</kwd>
<kwd>semantics</kwd>
<kwd>phonology</kwd>
<kwd>language processing</kwd>
</kwd-group>
<counts>
<fig-count count="3"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="41"/>
<page-count count="11"/>
<word-count count="8337"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="introduction" id="s1">
<title>Introduction</title>
<p>Natural sign languages can be described using the same linguistic terminology as spoken languages (Sandler and Lillo-Martin, <xref ref-type="bibr" rid="B27">2006</xref>). For example, the term lexicality refers to whether or not an item belongs to the vocabulary of a particular language. This definition applies to both sign languages and spoken languages, indicating theoretical equivalence. Functional equivalence of sign language and spoken language is indicated by the fact that they follow similar developmental milestones (Emmorey, <xref ref-type="bibr" rid="B10">2002</xref>) and show similar neural representation (R&#x000F6;nnberg et al., <xref ref-type="bibr" rid="B26">2000</xref>; MacSweeney et al., <xref ref-type="bibr" rid="B18">2008</xref>). This means that natural sign languages provide a tool for investigating the neural underpinnings of aspects of linguistic processing that are hard to isolate using spoken languages. For example, apparent lexicality effects relating to word processing can be confounded by grapheme-phoneme conversion because lexical access may take place <italic>via</italic> the orthographic route, even when stimulus items are speech recordings (Xiao et al., <xref ref-type="bibr" rid="B38">2005</xref>). Such a strategy is likely to reveal lexicality <italic>via</italic> orthography even in the absence of lexicality effects <italic>via</italic> an auditory route. Sign languages have semantics and phonology but no orthography<xref ref-type="fn" rid="fn0001"><sup>1</sup></xref>. Thus, they allow us to investigate the influence of lexicality on language processing without the confounding effects of orthographic processing.</p>
<p>Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>) studied the effect of lexicality on sign language processing using a sign language analog of the kind of phoneme-monitoring task that has often been used in studies of spoken language. In the classic phoneme-monitoring task (e.g., Pitt and Samuel, <xref ref-type="bibr" rid="B25">1995</xref>) participants give a button-press response when they recognize a target phoneme in a spoken word or non-word. In the sign-based version introduced by Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>), participants responded to short videos of signs and non-signs when the stimulus displayed a target handshape. Handshape is one of three recognized contrastive phonological components of sign language; the others are location and movement (Sandler and Lillo-Martin, <xref ref-type="bibr" rid="B27">2006</xref>). Handshape refers to the form of the signing hand or hands, location refers to the position in space or in contact with the body where the sign is articulated, and movement refers to the path traced by the signing hand or hands. The study by Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>) revealed no effect of lexicality on sign processing, as deaf signers showed no difference in the accuracy or latency of their responses to signs and non-signs during handshape monitoring. This finding was in line with Carreiras et al. (<xref ref-type="bibr" rid="B1800">2008</xref>) who showed no effect of lexicality on handshape priming, although they did report evidence that lexicality influenced location priming. Further, Guti&#x000E9;rrez et al. (<xref ref-type="bibr" rid="B1801">2012</xref>) reported lexicality effects on ERP modulation elicited by phonological priming; the effects differed for location (modulating the N400) and handshape (modulating a later component), but both handshape and location priming were affected by lexicality. 
Thus, there is neurophysiological evidence that lexicality influences sublexical processing of sign language relating to handshape and location, and behavioral evidence that it influences sublexical processing of sign language relating to location.</p>
<p>In a recent study (Cardin et al., <xref ref-type="bibr" rid="B5">2016</xref>), we administered a sign-based phoneme-monitoring task (see Grosvald et al., <xref ref-type="bibr" rid="B12">2012</xref>). The stimuli were manual actions that belonged to four different categories: British Sign Language (BSL, lexicalized in BSL but not Swedish Sign Language, SSL); SSL (lexicalized in SSL but not BSL); Cognates (lexicalized in both BSL and SSL, and rated as more iconic than items in the BSL and SSL categories); Non-signs (made-up signs which violated the phonological conventions of both BSL and SSL and were lexicalized in neither). It is important to note that the illegal non-signs used in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) were qualitatively different from the legal non-signs used by Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>) as well as by Carreiras et al. (<xref ref-type="bibr" rid="B1800">2008</xref>) and Guti&#x000E9;rrez et al. (<xref ref-type="bibr" rid="B1801">2012</xref>). The participants in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) were deaf signers, deaf non-signers and hearing non-signers, all with a British cultural background and so the SSL stimuli were the equivalent of the legal non-signs used in the previous studies (Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>; Grosvald et al., <xref ref-type="bibr" rid="B12">2012</xref>; Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>). The advantage of using signs from a mutually unintelligible sign language instead of made-up signs is that lexicalized and non-lexicalized items are all natural signs.</p>
<p>During fMRI scanning, the participants in the study by Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) were cued to monitor the manual actions in each of the four categories for handshape and location in two different versions of the phoneme-monitoring task. The results showed an effect of phonological violation (i.e., a difference between legal signs and illegal non-signs) on the neural networks supporting phoneme monitoring, but no effects related to lexicality (i.e., contrasting lexicalized BSL and non-lexicalized SSL to determine neural activation relating to whether or not stimulus items were part of the participant&#x02019;s vocabulary) or iconicity (i.e., contrasting Cognates and BSL, which differed in iconicity or the extent to which stimulus items looked like their referents). Indeed, non-signs compared to signs elicited stronger activation in an action observation network in all participants. This indicates greater processing demands for illegal compared to legal signs, irrespective of sign language knowledge and suggests that the phonological characteristics of language may be determined by neural processing efficiency. The absence of a lexicality effect was in line with behavioral work showing no effect of lexicality on handshape monitoring (Grosvald et al., <xref ref-type="bibr" rid="B12">2012</xref>) and imaging work showing no effect of lexicality on processing single signs (Petitto et al., <xref ref-type="bibr" rid="B24">2000</xref>). However, it was not in line with work showing neurophysiological effects of lexicality on handshape processing (Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>) and both behavioral (Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>) and neurophysiological effects of lexicality on location processing (Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>). Furthermore, the results of Cardin et al. 
(<xref ref-type="bibr" rid="B5">2016</xref>) did not support work showing greater activation in left inferior frontal gyrus for processing signed sentences than pseudosentences consisting of non-linguistic manual actions (MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>) for deaf signers, and greater activation in left angular and supramarginal gyri for processing gestures with meaning compared to those without (Husain et al., <xref ref-type="bibr" rid="B14">2012</xref>). Finally, the results of Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) did not support findings relating to speech processing showing differences in neural networks underpinning phoneme monitoring with spoken words and non-words (Newman and Twieg, <xref ref-type="bibr" rid="B22">2001</xref>; Xiao et al., <xref ref-type="bibr" rid="B38">2005</xref>).</p>
<p>In the present study, we honed the experimental design to focus on lexicality. We achieved this by recruiting deaf signers who were native users of either BSL or SSL but who had no knowledge of the other sign language. This allowed us to use BSL as lexical items and SSL as non-lexical items for the BSL signers and vice versa for the SSL signers, thus avoiding the confound of using different materials for these conditions. Cognates and non-signs had the same status for both groups. The tasks and the materials were identical to those used in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>).</p>
<p>We predicted that if lexicality does influence the neural networks that support phoneme monitoring, as suggested by previous work showing effects of lexicality on phoneme monitoring of spoken words (Newman and Twieg, <xref ref-type="bibr" rid="B22">2001</xref>; Xiao et al., <xref ref-type="bibr" rid="B38">2005</xref>) as well as on sublexical (Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>; Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>) and lexical (MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>) processing of sign language, the fMRI results of our experiment would reveal this. We were also open to the possibility that effects of iconicity and phonology on neural networks might differ from those in our previous study because lexicality was involved in both the underlying contrasts. We expected to replicate the significant effect of task observed in our previous study. To confirm our assumption of good experimental control, we tested for main effects of, and interactions with, task and group.</p>
</sec>
<sec sec-type="materials and methods" id="s2">
<title>Materials and Methods</title>
<sec id="s2-1">
<title>Participants</title>
<p>Fourteen deaf native British BSL signers and 16 deaf native Swedish SSL signers participated in this study. There were eight women in each group. The data from the British signers are included in group analyses reported in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) that compare signers with non-signers. Data from both British and Swedish signers are included in other analyses comparing signers with non-signers reported in Cardin et al. (<xref ref-type="bibr" rid="B6">2013</xref>). The comparison between the two groups of deaf signers from different cultural backgrounds is reported here for the first time. Native signers were defined in the present study as signers with at least one deaf parent who acquired SL from birth through their family. The native language of the British signers was BSL and the native language of the Swedish signers was SSL. All participants stated that they had no knowledge of the non-native sign language used in the study, i.e., SSL for British signers and BSL for Swedish signers, and their familiarity with stimulus items was tested (see &#x0201C;Testing Procedure&#x0201D; section). All participants were screened for deafness by obtaining the pure tone average hearing threshold in decibels (dB) across the frequencies 1 kHz, 2 kHz and 4 kHz (British: <italic>M</italic> = 99.40, <italic>SD</italic> = 8.66; Swedish: <italic>M</italic> = 99.05, <italic>SD</italic> = 11.01) and they were all right handed (Oldfield, <xref ref-type="bibr" rid="B23">1971</xref>).</p>
<p>There was no statistically significant difference in age between groups (British: 29&#x02013;60, <italic>M</italic> = 38.07, <italic>SD</italic> = 11.91; Swedish: 23&#x02013;54, <italic>M</italic> = 34.14, <italic>SD</italic> = 10.11, <italic>t</italic> = 0.36, <italic>p</italic> &#x0003E; 0.05). Non-verbal intelligence was assessed using the block design subtest of the Wechsler Abbreviated Scale of Intelligence (British: <italic>M</italic> = 62.36, <italic>SD</italic> = 5.94; Swedish: <italic>M</italic> = 58.57, <italic>SD</italic> = 5.60) and there was no statistically significant difference in block design performance between groups, <italic>t</italic> = 0.10, <italic>p</italic> &#x0003E; 0.05.</p>
</sec>
<sec id="s2-2">
<title>Materials</title>
<p>The materials were identical to those used in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>). One-hundred and ninety-two videoclips (2&#x02013;3 s each) of individual signs were used as experimental stimuli; an additional 48 items were used in a practice session. Items were distributed equally across the four stimulus types: BSL-only (not lexicalized in SSL), SSL-only (not lexicalized in BSL), cognates (signs with the same form and meaning in both languages), and non-signs (sign-like items that have no meaning in either BSL or SSL and which combine phonological parameters in a manner that is either illegal or non-occurring in both languages).</p>
<p>In the present study, SSL stimuli presented to BSL signers and BSL stimuli presented to SSL signers filled the same function as the pronounceable non-signs used in previous studies (Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>; Grosvald et al., <xref ref-type="bibr" rid="B12">2012</xref>; Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>). However, they had the advantages of: (a) being natural rather than constructed signs; and (b) functioning as familiar signs for the other group. Both these factors contributed to good experimental control. Non-signs were included to determine whether the effect of phonological violation reported by Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) generalized across sign languages.</p>
<p>All signs were rated for age of acquisition, familiarity, iconicity, and complexity by two native signers of SSL. For BSL and Cognates, age of acquisition, familiarity, and iconicity ratings were taken from Vinson et al. (<xref ref-type="bibr" rid="B36">2008</xref>) while for SSL these properties, along with the complexity of all signs, were rated by two native signers of BSL. The non-signs were rated for complexity by all four raters. All raters received the same instructions in the appropriate language.</p>
<p>There was no statistically significant difference in complexity ratings across stimulus types or age of acquisition across lexical signs (BSL, SSL, cognates). There was no statistically significant difference in iconicity or familiarity ratings between BSL and SSL signs, but cognates were rated higher on both of these parameters as a natural corollary of their shared visual motivation. Iconicity was described to the raters as occurring when a sign &#x0201C;looks like its meaning,&#x0201D; and both positive and negative examples were given to ensure that the concept was fully understood. The selection of signs is described in full in Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>).</p>
<p>All experimental stimuli were video-recorded using a high-definition digital camera. The model was a deaf native DGS (German Sign Language) signer who was not a user of either BSL or SSL. This meant that the BSL and SSL signs were equally accented. During recording, the model was visible from the hips to above the head, was seated against a blue background and wore a dark shirt. Examples of experimental stimuli are shown in <xref ref-type="fig" rid="F1">Figure 1</xref>. Apart from the experimental stimuli, there were cues to indicate which version of the task the participant should perform under each condition. The cues indicated one of six phonologically contrastive handshapes or phonologically contrastive locations and consisted of still images.</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>Examples of stills from stimulus videos in all four stimulus categories. The upper panel shows one-handed items and the lower panel shows 2-handed items. English glosses are shown on the model&#x02019;s torso with the Swedish glosses of SSL signs in parentheses.</p></caption>
<graphic xlink:href="fnhum-13-00374-g0001.tif"/>
</fig>
</sec>
<sec id="s2-3">
<title>Task</title>
<p>A phoneme monitoring task was presented in two versions, one with handshape targets and the other with location targets, cued at the beginning of each block. During blocks of eight stimuli, the participant pressed a button whenever a target stimulus was identified. The behavioral dependent measure was an adapted d&#x02019; based on hits adjusted for false alarms in accordance with signal detection theory. Twelve blocks of each of the four stimulus types (BSL, SSL, cognates and non-signs) were presented in randomized order. The participants were asked to fixate on the model&#x02019;s chin (the lower face is the natural locus of gaze for signers), and a fixation cross was displayed for 500 ms before each stimulus, indicating the location of the model&#x02019;s chin in the upcoming video. Between stimuli, a blank screen was displayed for an average of 4.5 s, and between blocks, a still of the model with a fixation cross on the chin was displayed for 15 s. This constituted the baseline. Participants were instructed to press the button when the cross changed color from yellow to red.</p>
</sec>
<sec id="s2-4">
<title>Testing Procedure</title>
<p>Before the experiment, the tasks were explained to the participants in their native sign language and written instructions in English or Swedish were provided as appropriate. All participants practiced the tasks before the experiment.</p>
<p>During scanning, participants held the response box in their right hand. Two video cameras in the magnet&#x02019;s bore were used to monitor the participant&#x02019;s face and left hand. The experimenter monitored the participant&#x02019;s face to ensure that he or she was relaxed and awake throughout scanning and the participant&#x02019;s left hand to determine if the participant wished to communicate with the experimenter. A video camera in the control room allowed the experimenter to communicate with the participant between runs in SSL or BSL using the screen.</p>
<p>After scanning, all signed stimuli used in the experiment were presented to each participant one more time outside the scanner. For each stimulus they indicated whether it was a familiar sign and if so what it meant. Any item which was not correctly categorized was excluded from analysis for the particular individual.</p>
<p>All participants gave their written informed consent and were given ear-protection. This study was approved by the Swedish Regional Ethical committee and the UCL Ethical committee. All participants traveled to Birkbeck-UCL Centre of Neuroimaging in London to take part in the study and were compensated for their travel and accommodation expenses.</p>
</sec>
<sec id="s2-5">
<title>Image Acquisition and Data Analysis</title>
<p>Images were acquired with a 1.5T Siemens Avanto scanner (Siemens, Erlangen, Germany) and a 32-channel head coil at the Birkbeck-UCL Centre for Neuroimaging, London. Functional imaging data were acquired using a gradient-echo EPI sequence (repetition time = 2,975 ms, echo time = 50 ms, field of view = 192 &#x000D7; 192 mm) giving a notional resolution of 3 &#x000D7; 3 &#x000D7; 3 mm. Thirty-five slices were acquired to obtain whole-brain coverage without the cerebellum. Each experimental run consisted of 348 volumes taking approximately 17 min to acquire. The first seven volumes of each run were discarded to allow for T1 equilibration effects. An automatic shimming algorithm was used to reduce magnetic field inhomogeneities. A high resolution structural scan for anatomical localization purposes (magnetization-prepared rapid acquisition with gradient echo, repetition time = 2,730 ms, echo time = 3.57 ms, 1 mm<sup>3</sup> resolution, 176 slices) was taken either at the end or in the middle of the session.</p>
<p>Out of the total of 192 stimuli presented, data relating to on average 14 (<italic>SD</italic> = 8.5) stimuli were excluded from analysis due to incorrect categorization at post-test. Fewer non-signs were excluded than signs (<italic>p</italic> &#x0003C; 0.01) but there were no overall differences in exclusions between the different categories of signs (all <italic>p</italic>s &#x0003E; 0.05). There were fewer exclusions for Swedish (<italic>M</italic> = 8, <italic>SD</italic> = 2.8; <italic>p</italic> &#x0003C; 0.001) than British signers (<italic>M</italic> = 20, <italic>SD</italic> = 8.6). This difference was largely attributable to the SSL stimuli, all 48 of which were correctly recognized by the Swedish signers but among which the British signers also recognized on average 11 items (<italic>SD</italic> = 5.9) which were thus excluded from analysis for those individuals. The Swedish signers recognized slightly fewer non-signs (<italic>M</italic> = 0.1, <italic>SD</italic> = 0.9, <italic>p</italic> &#x0003C; 0.05) than the British signers (<italic>M</italic> = 1.8, <italic>SD</italic> = 3.0), but there were no significant differences (<italic>p</italic> &#x0003E; 0.05) in exclusions between groups for BSL or Cognates.</p>
<p>Imaging data were analyzed using Matlab 7.10 (Mathworks Inc., MA, USA) and Statistical Parametric Mapping software (SPM8; Wellcome Trust Centre for Neuroimaging, London, UK). Images were realigned, coregistered, normalized, and smoothed (8 mm FWHM Gaussian kernel) following SPM8 standard preprocessing procedures. Analysis was conducted by fitting a general linear model with regressors representing each stimulus type, task, baseline, and cue periods. For every regressor, events were modeled as a boxcar of the adequate duration, convolved with SPM&#x02019;s canonical hemodynamic response function and entered into a multiple regression analysis to generate parameter estimates for each regressor at every voxel. Movement parameters were derived from the realignment of the images and included in the model as regressors of no interest. Contrasts for each experimental stimulus type and task [e.g., (BSL location &#x0003E; Baseline)] were defined individually for each participant and taken to a second-level analysis.</p>
<p>To test for main effects of group and task, and interaction between task and group, a whole brain analysis of variance (ANOVA) was performed. The factors entered into the analysis were group (British, Swedish), task (handshape, location) and material (BSL, SSL, cognates, non-signs), resulting in a 2 &#x000D7; 2 &#x000D7; 4 ANOVA. Further analyses (described under the relevant parts of the &#x0201C;Results&#x0201D; section) were performed to isolate effects of Iconicity, Semantics, and Phonology. Significant activations at <italic>p</italic> &#x0003C; 0.05 corrected (FWE) at cluster or peak level are reported. Voxels are reported as x, y, z coordinates in accordance with standard brains from the Montreal Neurological Institute (MNI).</p>
</sec>
</sec>
<sec sec-type="results" id="s3">
<title>Results</title>
<sec id="s3-1">
<title>Behavioral Data</title>
<p>Data were analyzed by calculating a mixed repeated-measures ANOVA based on a 2 &#x000D7; 2 &#x000D7; 4 design with the factors group (British, Swedish), task (handshape, location) and material (BSL, SSL, cognates, non-signs). There was no significant main effect of group, <italic>F</italic><sub>(1,26)</sub> = 0.00, <italic>p</italic> = 0.99 or material, <italic>F</italic><sub>(3,78)</sub> = 0.84, <italic>p</italic> = 0.48. However, there was a significant main effect of task, <italic>F</italic><sub>(1,26)</sub> = 4.90, <italic>p</italic> = 0.036, and a significant interaction between task and material, <italic>F</italic><sub>(3,78)</sub> = 5.11, <italic>p</italic> = 0.003. Investigation of this interaction using paired sample <italic>t</italic>-tests with Bonferroni correction for multiple comparisons showed better performance on the location than handshape task for non-signs, <italic>t</italic><sub>(27)</sub> = 3.92, <italic>p</italic> = 0.004, while there was no significant difference in performance between tasks with any of the other materials (all <italic>p</italic>s &#x0003E; 0.05), see <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>Significant interaction in behavioral performance across groups. Error bars show standard deviation. **<italic>p</italic> &#x0003C; 0.01.</p></caption>
<graphic xlink:href="fnhum-13-00374-g0002.tif"/>
</fig>
</sec>
<sec id="s3-2">
<title>fMRI Data</title>
<sec id="s3-2-1">
<title>Effect of Group</title>
<p>In the main ANOVA, there was no significant net activation for deaf native signers of SSL compared to deaf native signers of BSL. However, in the British group, compared to the Swedish group, there was significantly more activation across all conditions in a motion-processing area of the left occipital cortex (&#x02212;46 &#x02212;68 &#x02212;4; see <xref ref-type="fig" rid="F3">Figure 3</xref>). This cluster appears near the visual motion area MT/V5 (Tootell et al., <xref ref-type="bibr" rid="B34">1995</xref>). We used SPM Anatomy Toolbox (Eickhoff et al., <xref ref-type="bibr" rid="B8">2005</xref>) to determine the location of this cluster in relation to a probabilistic map of area MT/V5 (Malikovic et al., <xref ref-type="bibr" rid="B20">2007</xref>). The results of this analysis revealed that the activation cluster is anterior and dorsal to MT/V5 (Eickhoff et al., <xref ref-type="bibr" rid="B8">2005</xref>), with 10&#x02013;30% chance of being within this cytoarchitectonic area. This suggests that this activation is located within, or anteriorly to, the MT+ complex, which encompasses V5/MT and other motion-sensitive areas, and likely corresponds to a higher-order visual motion region (Kolster et al., <xref ref-type="bibr" rid="B15">2010</xref>).</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p>Significantly greater BOLD response in visual motion-processing regions of the left occipital cortex for British deaf native signers than Swedish deaf native signers. The figure shows clusters significantly active (<italic>p</italic> &#x0003C; 0.05 FWE) for the contrast (BSL signers &#x0003E; SSL signers) across all conditions. The activation is rendered on the standard MNI brain. The axial slice is at <italic>z</italic> = &#x02212;4, while the sagittal slice is at <italic>x</italic> = &#x02212;46. The histogram shows effect size in each condition (NS, non-signs; COG, cognates) for each group at &#x02212;46 &#x02212;68 &#x02212;4. Error bars show standard error of mean.</p></caption>
<graphic xlink:href="fnhum-13-00374-g0003.tif"/>
</fig>
</sec>
<sec id="s3-2-2">
<title>Effect of Task</title>
<p>The handshape task minus the location task generated more activation in bilateral regions including the intraparietal sulcus (30 &#x02212;55 46, &#x02212;48 &#x02212;37 46), the ventral occipito-temporal cortex (&#x02212;24 &#x02212;70 &#x02212;8, 36 &#x02212;82 4) and the inferior frontal gyrus (&#x02212;45 5 31, 48 8 31). The location task compared to the handshape task generated more activation in the cingulate sulcus visual area (&#x02212;39 &#x02212;76 31, 48 &#x02212;76 25) and right angular gyrus (12 &#x02212;55 19). This pattern of results from a total of 28 deaf signers (Swedish and British) is similar to the pattern reported previously for 15 British deaf signers (Cardin et al., <xref ref-type="bibr" rid="B5">2016</xref>).</p>
</sec>
<sec id="s3-2-3">
<title>Effects of Lexicality, Iconicity and Phonology</title>
<sec id="s3-2-3-1">
<title>Lexicality</title>
<p>The effect of lexicality was examined by investigating neural activation for familiar signs minus unfamiliar signs across groups and tasks. For the British group, familiar signs were BSL and unfamiliar signs were SSL, while for the Swedish group, familiar signs were SSL and unfamiliar signs were BSL. There was no significant main effect of lexicality (or the opposite contrast) and there were no significant interactions with either task or group.</p>
</sec>
<sec id="s3-2-3-2">
<title>Iconicity</title>
<p>The effect of iconicity was examined by investigating neural activation for cognates (same for both groups) minus familiar signs (BSL for the British group and SSL for the Swedish group) across tasks. The cognate stimuli had a shared visual motivation across languages and were thus rated as being more iconic than the language-specific stimuli. There was no significant main effect of iconicity (or the opposite contrast) on neural activation and there were no significant interactions with either task or group.</p>
</sec>
<sec id="s3-2-3-3">
<title>Phonology</title>
<p>The effect of phonology was examined by investigating neural activation for unfamiliar signs (SSL for the British group and BSL for the Swedish group) minus non-signs (same for both groups). Because the phonological inventories of BSL and SSL are similar, even the unfamiliar signs are likely to match existing phonological representations. There was no significant main effect of phonology. However, the opposite contrast showed activation in the supramarginal gyrus bilaterally (34 &#x02212;46 49, &#x02212;57 &#x02212;25 41, 55 32 30, &#x02212;31 &#x02212;87 23) as well as in the left parietal lobule (&#x02212;35 &#x02212;42 46, &#x02212;17 &#x02212;63 53) and superior frontal gyrus (&#x02212;24 &#x02212;2 61), in agreement with Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>). No significant interaction with task or group was found with either contrast.</p>
</sec>
</sec>
</sec>
</sec>
<sec sec-type="discussion" id="s4">
<title>Discussion</title>
<p>In the present study, we investigated the effects of lexicality, iconicity and phonology on phoneme monitoring. We used sign language to avoid the confounding effect of orthographic recoding associated with spoken language and crossed materials across groups to avoid the confounding effect of using different materials for lexical and non-lexical items. fMRI results showed no significant effect of lexicality or iconicity on the neural networks associated with phoneme monitoring, and the effect of phonological violation agreed with previous work (Cardin et al., <xref ref-type="bibr" rid="B5">2016</xref>). In addition, we found that a motion-processing region of the left occipital cortex was activated more by the British signers than the Swedish signers during phoneme monitoring, irrespective of material or task.</p>
<sec id="s4-1">
<title>Lexicality</title>
<p>In the present study, lexicality was operationalized as the contrast between BSL and SSL, where BSL signs constituted lexical items and SSL non-lexical items for the BSL signers and vice versa for the SSL signers. This meant that both BSL and SSL signs occurred as both lexical items and non-lexical items making for good experimental control. The results of the present study revealed no effect of lexicality on the neural networks supporting phoneme monitoring (handshape and location). This is in line with Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>) who showed no effect of lexicality on speed or accuracy of handshape monitoring and Petitto et al. (<xref ref-type="bibr" rid="B24">2000</xref>) who showed no effect of lexicality on the neural networks supporting passive observation of single signs. It also extends our own previous work (Cardin et al., <xref ref-type="bibr" rid="B5">2016</xref>) by showing that the lexicality effect is absent during phoneme monitoring even when possible effects of materials are controlled for and power is increased. Further, it shows that the effect generalizes from BSL to another sign language&#x02014;SSL. Moreover, fewer items had to be excluded from analysis for Swedish than British signers, increasing the reliability of the present analysis.</p>
<p>However, the findings of the present study are at odds not only with studies showing an effect of lexicality on the neural networks underpinning phoneme monitoring of spoken words and non-words (Newman and Twieg, <xref ref-type="bibr" rid="B22">2001</xref>; Xiao et al., <xref ref-type="bibr" rid="B38">2005</xref>) but also studies that have shown an effect of lexicality on sublexical processing of sign language (Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>; Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>) as well as studies that have reported an effect of meaningfulness on the neural networks supporting processing of manual actions (MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>; Husain et al., <xref ref-type="bibr" rid="B14">2012</xref>).</p>
<p>The studies by Carreiras et al. (<xref ref-type="bibr" rid="B1800">2008</xref>, Experiment 3) and Guti&#x000E9;rrez et al. (<xref ref-type="bibr" rid="B1801">2012</xref>) required the participants to make a lexical decision on a target sign or pronounceable non-sign preceded by a phonologically related or unrelated item. Thus, the task had a direct bearing on the lexicality of the target item. The task used in the study by MacSweeney et al. (<xref ref-type="bibr" rid="B17">2004</xref>), although not directly related to lexicality, required participants to monitor for semantic anomaly, i.e., non-meaningfulness, among BSL sentences and strings of manual actions belonging to a non-linguistic manual-brachial code known as TicTac sometimes used by bookmakers at racecourses. In the study by Husain et al. (<xref ref-type="bibr" rid="B14">2012</xref>), participants performed a delayed match to sample task in which they were instructed in one version to determine whether the target gesture was identical to the sample gesture, and in the other version to determine whether the target belonged to the same category as the sample, the two possible categories being emblematic gestures and meaningless gestures. Thus, the task used by Husain et al. (<xref ref-type="bibr" rid="B14">2012</xref>) also tapped into meaning.</p>
<p>To sum up, the studies showing an effect of lexicality or meaningfulness on sign language processing (MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>; Carreiras et al., <xref ref-type="bibr" rid="B1800">2008</xref>; Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>; Husain et al., <xref ref-type="bibr" rid="B14">2012</xref>) all included at least one task that specifically draws attention to lexicality or meaningfulness. This differs from the present study which used a task based on monitoring phonological features of the stimuli, in line with Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>) and Grosvald et al. (<xref ref-type="bibr" rid="B12">2012</xref>), with no requirement to take into account either lexicality or meaningfulness. No sign-based phoneme monitoring study to our knowledge has found effects of lexicality on sign language processing. Taken together, the evidence suggests that lexicality does not influence sign language processing at the sub-lexical level when meaningfulness is not in focus, and supports the notion that apparent lexicality effects relating to spoken word processing actually reflect the grapheme-phoneme conversion required when lexical access takes place <italic>via</italic> the orthographic route (Xiao et al., <xref ref-type="bibr" rid="B38">2005</xref>).</p>
</sec>
<sec id="s4-2">
<title>Iconicity</title>
<p>In the present study, iconicity was operationalized as the contrast between, on the one hand, Cognates and, on the other hand, BSL signs for the BSL signers and SSL signs for the SSL signers. Iconicity was operationalized in this way because given the lack of common ancestry between BSL and SSL, the Cognates were by definition iconic signs by virtue of meaning having been incidentally mapped to the same surface representation in both languages. The greater iconicity of the Cognates compared to the stimuli consisting of signs occurring only in BSL or only in SSL was demonstrated by the significant difference in iconicity ratings. No effect of iconicity was revealed by the present study. This finding is in line with our previous study (Cardin et al., <xref ref-type="bibr" rid="B5">2016</xref>) and generalizes it from BSL to SSL using the very same materials. It is also in line with the well-established notion that the link between a lexical item and its referent is characterized by its arbitrariness, and that any surface resemblance is irrelevant and does not influence language processing (for a review, see Perniss et al., <xref ref-type="bibr" rid="B3300">2010</xref>). However, it deviates from a recent set of findings suggesting that iconicity does indeed influence language processing and that under certain circumstances it may provide a link to experience that helps bridge the gap between linguistic form and conceptual representation (Perniss et al., <xref ref-type="bibr" rid="B3300">2010</xref>). In particular, Thompson et al. (<xref ref-type="bibr" rid="B33">2010</xref>) showed that iconicity interferes with phonological decision-making, rendering it slower and less accurate. However, Emmorey (<xref ref-type="bibr" rid="B9">2014</xref>) argued that psycholinguistic effects of iconicity may only be observed when the task specifically taps into the structured mapping capturing the resemblance between the form and its meaning. 
The task employed by Thompson et al. (<xref ref-type="bibr" rid="B33">2010</xref>) involved determining whether the fingers in any particular sign were straight or curved, tapping into structural mappings to objects with flat sides (e.g., BSL BOX) or curved sides (e.g., BSL CUP) respectively. The tasks used in the present study, however, which involved determining whether the handshape or location of each sign presented matched that of a given target, did not specifically tap into structural mappings.</p>
</sec>
<sec id="s4-3">
<title>Phonology</title>
<p>In the present study, phonology was operationalized as the contrast between legal signs and illegal non-signs. Phonologically illegal items elicited stronger activation in an action observation network than phonologically legal signs, in agreement with Cardin et al. (<xref ref-type="bibr" rid="B5">2016</xref>). This network included the supramarginal gyrus, a region associated with phonological processing in both sign language and spoken languages. This finding strengthens the notion of greater processing demands for illegal compared to legal signs (whether actual signs or phonologically possible signs), by showing that it generalizes across sign languages and is independent of the specific materials used as legal signs. Interestingly, behavioral results showed better performance on the location than handshape task for non-signs, i.e., when manual actions were phonologically illegal, but not for any of the legal signs. This suggests that the perceptual salience of location over handshape (Brentari et al., <xref ref-type="bibr" rid="B3">2011</xref>) plays out to a greater extent for illegal manual actions than legal signs, strengthening the notion that phoneme monitoring of illegal items engenders perceptual rather than phonological processing.</p>
</sec>
<sec id="s4-4">
<title>Task</title>
<p>The phoneme monitoring task used in the present study was administered in two versions, one with handshape cues and one with location cues. Differences in the perceptual processing of handshape and location also became apparent in neural activation. Handshape compared to location monitoring generated more activation in the ventral visual stream while the opposite contrast generated more activation in the dorsal visual stream. This is in line with the perceptual nature of the two versions of the task: the handshape version focusing on &#x0201C;what&#x0201D; and the location version on &#x0201C;where&#x0201D; (Milner and Goodale, <xref ref-type="bibr" rid="B21">1993</xref>; Ungerleider and Haxby, <xref ref-type="bibr" rid="B35">1994</xref>). It also chimes in with the finding that location priming modulates an earlier ERP component (N400) than handshape (Guti&#x000E9;rrez et al., <xref ref-type="bibr" rid="B1801">2012</xref>).</p>
</sec>
<sec id="s4-5">
<title>Group</title>
<p>The main effect of group on the neural correlates of sign-based phoneme monitoring was novel and unexpected. In particular, the left occipital cortex, close to area V5/MT+ was activated more by the British signers than the Swedish signers during phoneme monitoring. This region is likely to be part of the MT+ complex, and could be involved in the kind of complex motion processing necessary to determine the phonological characteristics of manual gestures. However, because the phonological inventories of BSL and SSL are highly similar, it is unlikely that there are systematic differences in the motion processing demands of the experimental tasks across languages. Fewer items had to be excluded from the analysis for Swedish than British signers, principally because the British signers unexpectedly recognized some of the SSL signs. We have shown that lexicality does not influence the neural networks supporting phoneme monitoring and thus it is unlikely that the difference in exclusion rate contributes to the observed group effect. The one previous imaging study (Petitto et al., <xref ref-type="bibr" rid="B24">2000</xref>) including deaf users of two sign languages did not report differences in processing between the two groups. Unlike the sign languages used in the present study, BSL and SSL, the sign languages used in the study by Petitto et al. (<xref ref-type="bibr" rid="B24">2000</xref>), American Sign Language (ASL) and Quebec Sign Language (LSQ), are historically related, with the former heavily influencing the latter. Thus, the present study is the first to report differences in the neural networks supporting sign processing between well-matched groups of users of distinct sign languages with no known historical links.</p>
<p>Similar regions have previously been found to be activated during sign language processing, including signed sentences and discourse (S&#x000F6;derfeldt et al., <xref ref-type="bibr" rid="B30">1994</xref>, <xref ref-type="bibr" rid="B29">1997</xref>; MacSweeney et al., <xref ref-type="bibr" rid="B19">2002</xref>, <xref ref-type="bibr" rid="B17">2004</xref>, <xref ref-type="bibr" rid="B16">2006</xref>; Emmorey et al., <xref ref-type="bibr" rid="B11">2014</xref>) and individual signs (Petitto et al., <xref ref-type="bibr" rid="B24">2000</xref>; MacSweeney et al., <xref ref-type="bibr" rid="B16">2006</xref>; Capek et al., <xref ref-type="bibr" rid="B4">2008</xref>). The V5/MT+ complex is not generally associated with speech processing (Scott and Johnsrude, <xref ref-type="bibr" rid="B28">2003</xref>; Hickok and Poeppel, <xref ref-type="bibr" rid="B13">2007</xref>) and it has been shown to be activated more by sign than audiovisual speech in hearing signers (S&#x000F6;derfeldt et al., <xref ref-type="bibr" rid="B29">1997</xref>; Emmorey et al., <xref ref-type="bibr" rid="B11">2014</xref>) and more by sign than visual speechreading in deaf signers who are also proficient speech readers (Capek et al., <xref ref-type="bibr" rid="B4">2008</xref>). Thus, the V5/MT+ complex seems to play a sign-specific role in language processing that may well involve mapping of visuo-dynamic input to higher-order phonological and semantic representations.</p>
<p>Further, Capek et al. (<xref ref-type="bibr" rid="B4">2008</xref>) showed that an adjacent area (&#x02212;47, &#x02212;59, &#x02212;10) was activated more for signs with non-speech-like than speech-like mouth actions in deaf native signers, thus further attesting to the specificity of this region for sign over speech. In that study, speech-like mouth actions (mouthings) were represented by mouth actions that disambiguate minimal pairs of lexical items with identical manual forms and which resemble the equivalent spoken forms (Sutton-Spence and Woll, <xref ref-type="bibr" rid="B31">1999</xref>). Non-speech-like mouth actions (mouth gestures) were represented by one type of mouth gesture, echo phonology, i.e., mouth actions that &#x0201C;echo&#x0201D; on the mouth certain articulatory movements of the hands (Woll, <xref ref-type="bibr" rid="B37">2014</xref>).</p>
<p>Group-specific differences in motion-processing regions of the occipital cortex are also documented in the literature. These include more activation during observation of sign language for hearing signers than hearing non-signers (MacSweeney et al., <xref ref-type="bibr" rid="B16">2006</xref>), indicating sensitivity to the linguistic content of the stimuli; and more activation for hearing than deaf native signers during comprehension of signed sentences, possibly indicating sensitivity to differences in sign language skill (MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>). To summarize, the literature suggests that motion-processing regions of the occipital cortex support the processing of communicative gestures and that their engagement is modulated by the sign-specificity of accompanying mouth actions as well as the ability to access linguistic content and skill in achieving this.</p>
<p>Data from a previous study from our lab comparing memory processing in deaf signers and hearing non-signers in British and Swedish participants indicated that whereas a sign-based memory strategy was used by Swedish participants, a speech-based strategy was used by British participants (Andin et al., <xref ref-type="bibr" rid="B2">2013</xref>). In that article, we argued that this might be because sign language education has been more consistently implemented in deaf education in Sweden (Svartholm, <xref ref-type="bibr" rid="B32">2010</xref>; Andin et al., <xref ref-type="bibr" rid="B1">2018</xref>) than in Britain and that systematic differences in the education models might lead to different sign-processing strategies. Thus, one explanation of the difference in neural networks supporting sign-based phoneme monitoring in British and Swedish deaf signers could be systematic differences in the sign-processing strategies used for task solution (see MacSweeney et al., <xref ref-type="bibr" rid="B17">2004</xref>) due to differences in approaches to the use of spoken language in deaf educational settings. Such differences would be unlikely to occur between sign languages in similar cultural settings (see Petitto et al., <xref ref-type="bibr" rid="B24">2000</xref>).</p>
<p>Bearing in mind that Capek et al. (<xref ref-type="bibr" rid="B4">2008</xref>) found differences in the activation of motion-processing regions of the occipital cortex depending on whether the signs observed by the deaf participants included speech-like or non-speech-like mouth actions, it needs to be considered whether the activation difference in the present study is driven by systematic differences in the use of such characteristics between BSL and SSL. Even though the signed stimuli used in the present study did not include mouth actions, it is not inconceivable that representations of mouth actions could be activated during observation and processing of manual actions. Crasborn et al. (<xref ref-type="bibr" rid="B7">2008</xref>) compared the distribution of mouth actions in BSL and SSL and found that they were highly similar across languages. In particular, occurrence of mouthings in the corpus examined was 51% in BSL and 57% in SSL while the corresponding occurrence of echo phonology was 2% and 7%. This suggests that even if the two groups of participants had different strategies regarding the extent to which they made use of existing representations of mouth actions while performing the phoneme monitoring task, it is unlikely that such strategy differences would be confounded by systematic differences in the occurrence of different types of mouth actions across these two languages. Thus, we suggest that greater activation of motion-processing regions of the occipital cortex for British compared to Swedish deaf signers during a sign-based phoneme-monitoring task in the present study may be due to the use of different strategies. It is possible that different strategies are driven by different educational experiences, but this needs to be investigated further.</p>
</sec>
</sec>
<sec sec-type="conclusion" id="s5">
<title>Conclusion</title>
<p>The pattern of results in the present study suggests that lexicality does not influence sublexical processing of sign language in the absence of lexical task demands. This finding supports the notion that the effects of lexicality previously observed in phoneme monitoring of speech may reflect an orthographic strategy. Further, results showed that the neural networks supporting linguistic processing are modulated by phonological constraints but not iconicity, at least in the absence of structural mapping requirements. Although deaf signers from different language and cultural backgrounds engage largely similar neural networks during sign-based phoneme monitoring, we identified differential activation of motion-processing regions of the occipital cortex possibly relating to differences in strategies possibly driven by cultural differences such as schooling. Importantly, the group effect does not interact with lexicality, underscoring the robustness of the absence of a lexicality effect.</p>
</sec>
<sec id="s7">
<title>Ethics Statement</title>
<p>This study was carried out in accordance with the recommendations of the UCL Ethical committee and Swedish legislation with written informed consent from all subjects. All subjects gave written informed consent in accordance with the Declaration of Helsinki. The protocol was approved by the UCL Ethical committee and the Regional Ethical Review Board in Link&#x000F6;ping, Sweden.</p>
</sec>
<sec id="s8">
<title>Author Contributions</title>
<p>The study was designed by MR, EO, BW, CC and JR. The data were collected by EO, LK, VC and CC and analyzed by LK, VC. MR prepared the first draft of the article and all authors contributed to the final version.</p>
</sec>
<sec id="s9">
<title>Conflict of Interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
</body>
<back>
<ack>
<p>We thank Mischa Cooke, Lena Davidsson, Anders Hermansson, Ramas Rentelis, Lilli Risner, and Guiping Xu for help with participant recruitment and data acquisition. Special thanks go to the British and Swedish participants in the study.</p>
</ack>
<fn-group>
<fn fn-type="financial-disclosure">
<p><bold>Funding.</bold> This study was supported by funding from the Swedish Foundation for Humanities and Social Sciences, grant number P2008-0481:1-E; the Swedish Council for Working Life and Social Research, grant number 2008-0846; and the Linnaeus Centre HEAD grant from the Swedish Research Council as well as grants from the Economic and Social Research Council of Great Britain (RES-620-28-6001; RES-620-28-6002) to the Deafness Cognition and Language Research Centre.</p>
</fn>
</fn-group>
<ref-list>
<title>References</title>
<ref id="B1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Andin</surname> <given-names>J.</given-names></name> <name><surname>Fransson</surname> <given-names>P.</given-names></name> <name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>Rudner</surname> <given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>fMRI evidence of magnitude manipulation during numerical order processing in congenitally deaf signers</article-title>. <source>Neural Plast.</source> <volume>2018</volume>:<fpage>2576047</fpage>. <pub-id pub-id-type="doi">10.1155/2018/2576047</pub-id><pub-id pub-id-type="pmid">30662455</pub-id></citation></ref>
<ref id="B2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Andin</surname> <given-names>J.</given-names></name> <name><surname>Orfanidou</surname> <given-names>E.</given-names></name> <name><surname>Cardin</surname> <given-names>V.</given-names></name> <name><surname>Holmer</surname> <given-names>E.</given-names></name> <name><surname>Capek</surname> <given-names>C. M.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <etal/></person-group>. (<year>2013</year>). <article-title>Similar digit-based working memory in deaf signers and hearing non-signers despite digit span differences</article-title>. <source>Front. Psychol.</source> <volume>4</volume>:<fpage>942</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2013.00942</pub-id><pub-id pub-id-type="pmid">24379797</pub-id></citation></ref>
<ref id="B3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Brentari</surname> <given-names>D.</given-names></name> <name><surname>Gonz&#x000E1;lez</surname> <given-names>C.</given-names></name> <name><surname>Seidl</surname> <given-names>A.</given-names></name> <name><surname>Wilbur</surname> <given-names>R.</given-names></name></person-group> (<year>2011</year>). <article-title>Sensitivity to visual prosodic cues in signers and nonsigners</article-title>. <source>Lang. Speech</source> <volume>54</volume>, <fpage>49</fpage>&#x02013;<lpage>72</lpage>. <pub-id pub-id-type="doi">10.1177/0023830910388011</pub-id><pub-id pub-id-type="pmid">21524012</pub-id></citation></ref>
<ref id="B4"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Capek</surname> <given-names>C. M.</given-names></name> <name><surname>Waters</surname> <given-names>D.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>MacSweeney</surname> <given-names>M.</given-names></name> <name><surname>Brammer</surname> <given-names>M. J.</given-names></name> <name><surname>McGuire</surname> <given-names>P. K.</given-names></name> <etal/></person-group>. (<year>2008</year>). <article-title>Hand and mouth: cortical correlates of lexical processing in British sign language and Speechreading English</article-title>. <source>J. Cogn. Neurosci.</source> <volume>20</volume>, <fpage>1220</fpage>&#x02013;<lpage>1234</lpage>. <pub-id pub-id-type="doi">10.1162/jocn.2008.20084</pub-id><pub-id pub-id-type="pmid">18284353</pub-id></citation></ref>
<ref id="B5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cardin</surname> <given-names>V.</given-names></name> <name><surname>Orfanidou</surname> <given-names>E.</given-names></name> <name><surname>K&#x000E4;stner</surname> <given-names>L.</given-names></name> <name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>Capek</surname> <given-names>C. M.</given-names></name> <etal/></person-group>. (<year>2016</year>). <article-title>Monitoring different phonological parameters of sign language engages the same cortical language network but distinctive perceptual ones</article-title>. <source>J. Cogn. Neurosci.</source> <volume>28</volume>, <fpage>20</fpage>&#x02013;<lpage>40</lpage>. <pub-id pub-id-type="doi">10.1162/jocn_a_00872</pub-id><pub-id pub-id-type="pmid">26351993</pub-id></citation></ref>
<ref id="B6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cardin</surname> <given-names>V.</given-names></name> <name><surname>Orfanidou</surname> <given-names>E.</given-names></name> <name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>Capek</surname> <given-names>C. M.</given-names></name> <name><surname>Rudner</surname> <given-names>M.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name></person-group> (<year>2013</year>). <article-title>Dissociating cognitive and sensory neural plasticity in human superior temporal cortex</article-title>. <source>Nat. Commun.</source> <volume>4</volume>:<fpage>1473</fpage>. <pub-id pub-id-type="doi">10.1038/ncomms2463</pub-id><pub-id pub-id-type="pmid">23403574</pub-id></citation></ref>
<ref id="B1800"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Carreiras</surname> <given-names>M.</given-names></name> <name><surname>Guti&#x000E9;rrez-Sigut</surname> <given-names>E.</given-names></name> <name><surname>Baquero</surname> <given-names>S.</given-names></name> <name><surname>Corina</surname> <given-names>D.</given-names></name></person-group> (<year>2008</year>). <article-title>Lexical processing in Spanish Sign Language (LSE)</article-title>. <source>J. Mem. Lang.</source> <volume>58</volume>, <fpage>100</fpage>&#x02013;<lpage>122</lpage>. <pub-id pub-id-type="doi">10.1016/j.jml.2007.05.004</pub-id></citation></ref>
<ref id="B7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Crasborn</surname> <given-names>O.</given-names></name> <name><surname>van der Kooij</surname> <given-names>E.</given-names></name> <name><surname>Waters</surname> <given-names>D.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>Mesch</surname> <given-names>J.</given-names></name></person-group> (<year>2008</year>). <article-title>Frequency distribution and spreading behavior of different types of mouth actions in three sign languages</article-title>. <source>Sign Lang. Linguist.</source> <volume>11</volume>, <fpage>45</fpage>&#x02013;<lpage>67</lpage>. <pub-id pub-id-type="doi">10.1075/sll.11.1.04cra</pub-id></citation></ref>
<ref id="B8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Eickhoff</surname> <given-names>S. B.</given-names></name> <name><surname>Stephan</surname> <given-names>K. E.</given-names></name> <name><surname>Mohlberg</surname> <given-names>H.</given-names></name> <name><surname>Grefkes</surname> <given-names>C.</given-names></name> <name><surname>Fink</surname> <given-names>G. R.</given-names></name> <name><surname>Amunts</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2005</year>). <article-title>A new SPM toolbox for combining probabilistic cytoarchitectonic maps and functional imaging data</article-title>. <source>Neuroimage</source> <volume>25</volume>, <fpage>1325</fpage>&#x02013;<lpage>1335</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2004.12.034</pub-id><pub-id pub-id-type="pmid">15850749</pub-id></citation></ref>
<ref id="B10"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Emmorey</surname> <given-names>K.</given-names></name></person-group> (<year>2002</year>). <source>Language, Cognition and the Brain: Insights from Sign Language Research.</source> <publisher-loc>Mahwah, NJ</publisher-loc>: <publisher-name>Erlbaum</publisher-name>.</citation></ref>
<ref id="B9"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Emmorey</surname> <given-names>K.</given-names></name></person-group> (<year>2014</year>). <article-title>Iconicity as structure mapping</article-title>. <source>Philos. Trans. R. Soc. Lond. B Biol. Sci.</source> <volume>369</volume>:<fpage>20130301</fpage>. <pub-id pub-id-type="doi">10.1098/rstb.2013.0301</pub-id><pub-id pub-id-type="pmid">25092669</pub-id></citation></ref>
<ref id="B11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Emmorey</surname> <given-names>K.</given-names></name> <name><surname>McCullough</surname> <given-names>S.</given-names></name> <name><surname>Mehta</surname> <given-names>S.</given-names></name> <name><surname>Grabowski</surname> <given-names>T. J.</given-names></name></person-group> (<year>2014</year>). <article-title>How sensory-motor systems impact the neural organization for language: direct contrasts between spoken and signed language</article-title>. <source>Front. Psychol.</source> <volume>5</volume>:<fpage>484</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.00484</pub-id><pub-id pub-id-type="pmid">24904497</pub-id></citation></ref>
<ref id="B12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Grosvald</surname> <given-names>M.</given-names></name> <name><surname>Lachaud</surname> <given-names>C.</given-names></name> <name><surname>Corina</surname> <given-names>D.</given-names></name></person-group> (<year>2012</year>). <article-title>Handshape monitoring: evaluation of linguistic and perceptual factors in the processing of American Sign Language</article-title>. <source>Lang. Cogn. Process.</source> <volume>27</volume>, <fpage>117</fpage>&#x02013;<lpage>141</lpage>. <pub-id pub-id-type="doi">10.1080/01690965.2010.549667</pub-id><pub-id pub-id-type="pmid">22822282</pub-id></citation></ref>
<ref id="B1801"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Guti&#x000E9;rrez</surname> <given-names>E.</given-names></name> <name><surname>M&#x000FC;ller</surname> <given-names>O.</given-names></name> <name><surname>Baus</surname> <given-names>C.</given-names></name> <name><surname>Carreiras</surname> <given-names>M.</given-names></name></person-group> (<year>2012</year>). <article-title>Electrophysiological evidence for phonological priming in Spanish Sign Language lexical access</article-title>. <source>Neuropsychologia</source> <volume>50</volume>, <fpage>1335</fpage>&#x02013;<lpage>1346</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuropsychologia.2012.02.018</pub-id><pub-id pub-id-type="pmid">22387606</pub-id></citation></ref>
<ref id="B13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hickok</surname> <given-names>G.</given-names></name> <name><surname>Poeppel</surname> <given-names>D.</given-names></name></person-group> (<year>2007</year>). <article-title>The cortical organization of speech processing</article-title>. <source>Nat. Rev. Neurosci.</source> <volume>8</volume>, <fpage>393</fpage>&#x02013;<lpage>402</lpage>. <pub-id pub-id-type="doi">10.1038/nrn2113</pub-id><pub-id pub-id-type="pmid">17431404</pub-id></citation></ref>
<ref id="B14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Husain</surname> <given-names>F. T.</given-names></name> <name><surname>Patkin</surname> <given-names>D. J.</given-names></name> <name><surname>Kim</surname> <given-names>J.</given-names></name> <name><surname>Braun</surname> <given-names>A. R.</given-names></name> <name><surname>Horwitz</surname> <given-names>B.</given-names></name></person-group> (<year>2012</year>). <article-title>Dissociating neural correlates of meaningful emblems from meaningless gestures in deaf signers and hearing non-signers</article-title>. <source>Brain Res.</source> <volume>1478</volume>, <fpage>24</fpage>&#x02013;<lpage>35</lpage>. <pub-id pub-id-type="doi">10.1016/j.brainres.2012.08.029</pub-id><pub-id pub-id-type="pmid">22968047</pub-id></citation></ref>
<ref id="B15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kolster</surname> <given-names>H.</given-names></name> <name><surname>Peeters</surname> <given-names>R.</given-names></name> <name><surname>Orban</surname> <given-names>G. A.</given-names></name></person-group> (<year>2010</year>). <article-title>The retinotopic organization of the human middle temporal area MT/V5 and its cortical neighbors</article-title>. <source>J. Neurosci.</source> <volume>30</volume>, <fpage>9801</fpage>&#x02013;<lpage>9820</lpage>. <pub-id pub-id-type="doi">10.1523/jneurosci.2069-10.2010</pub-id><pub-id pub-id-type="pmid">20660263</pub-id></citation></ref>
<ref id="B16"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>MacSweeney</surname> <given-names>M.</given-names></name> <name><surname>Campbell</surname> <given-names>R.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>Brammer</surname> <given-names>M. J.</given-names></name> <name><surname>Giampietro</surname> <given-names>V.</given-names></name> <name><surname>David</surname> <given-names>A. S.</given-names></name> <etal/></person-group>. (<year>2006</year>). <article-title>Lexical and sentential processing in British Sign Language</article-title>. <source>Hum. Brain Mapp.</source> <volume>27</volume>, <fpage>63</fpage>&#x02013;<lpage>76</lpage>. <pub-id pub-id-type="doi">10.1002/hbm.20167</pub-id><pub-id pub-id-type="pmid">15966001</pub-id></citation></ref>
<ref id="B17"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>MacSweeney</surname> <given-names>M.</given-names></name> <name><surname>Campbell</surname> <given-names>R.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>Giampietro</surname> <given-names>V.</given-names></name> <name><surname>David</surname> <given-names>A. S.</given-names></name> <name><surname>McGuire</surname> <given-names>P. K.</given-names></name> <etal/></person-group>. (<year>2004</year>). <article-title>Dissociating linguistic and nonlinguistic gestural communication in the brain</article-title>. <source>Neuroimage</source> <volume>22</volume>, <fpage>1605</fpage>&#x02013;<lpage>1618</lpage>. <pub-id pub-id-type="doi">10.1016/j.neuroimage.2004.03.015</pub-id><pub-id pub-id-type="pmid">15275917</pub-id></citation></ref>
<ref id="B18"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>MacSweeney</surname> <given-names>M.</given-names></name> <name><surname>Capek</surname> <given-names>C. M.</given-names></name> <name><surname>Campbell</surname> <given-names>R.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name></person-group> (<year>2008</year>). <article-title>The signing brain: the neurobiology of sign language</article-title>. <source>Trends Cogn. Sci.</source> <volume>12</volume>, <fpage>432</fpage>&#x02013;<lpage>440</lpage>. <pub-id pub-id-type="doi">10.1016/j.tics.2008.07.010</pub-id><pub-id pub-id-type="pmid">18805728</pub-id></citation></ref>
<ref id="B19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>MacSweeney</surname> <given-names>M.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name> <name><surname>Campbell</surname> <given-names>R.</given-names></name> <name><surname>McGuire</surname> <given-names>P. K.</given-names></name> <name><surname>David</surname> <given-names>A. S.</given-names></name> <name><surname>Williams</surname> <given-names>S. C.</given-names></name> <etal/></person-group>. (<year>2002</year>). <article-title>Neural systems underlying British Sign Language and audio-visual English processing in native users</article-title>. <source>Brain</source> <volume>125</volume>, <fpage>1583</fpage>&#x02013;<lpage>1593</lpage>. <pub-id pub-id-type="doi">10.1093/brain/awf153</pub-id><pub-id pub-id-type="pmid">12077007</pub-id></citation></ref>
<ref id="B20"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Malikovic</surname> <given-names>A.</given-names></name> <name><surname>Amunts</surname> <given-names>K.</given-names></name> <name><surname>Schleicher</surname> <given-names>A.</given-names></name> <name><surname>Mohlberg</surname> <given-names>H.</given-names></name> <name><surname>Eickhoff</surname> <given-names>S. B.</given-names></name> <name><surname>Wilms</surname> <given-names>M.</given-names></name> <etal/></person-group>. (<year>2007</year>). <article-title>Cytoarchitectonic analysis of the human extrastriate cortex in the region of V5/MT+: a probabilistic, stereotaxic map of area hOc5</article-title>. <source>Cereb. Cortex</source> <volume>17</volume>, <fpage>562</fpage>&#x02013;<lpage>574</lpage>. <pub-id pub-id-type="doi">10.1093/cercor/bhj181</pub-id><pub-id pub-id-type="pmid">16603710</pub-id></citation></ref>
<ref id="B21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Milner</surname> <given-names>A. D.</given-names></name> <name><surname>Goodale</surname> <given-names>M. A.</given-names></name></person-group> (<year>1993</year>). <article-title>Visual pathways to perception and action</article-title>. <source>Prog. Brain Res.</source> <volume>95</volume>, <fpage>317</fpage>&#x02013;<lpage>337</lpage>. <pub-id pub-id-type="doi">10.1016/s0079-6123(08)60379-9</pub-id><pub-id pub-id-type="pmid">8493342</pub-id></citation></ref>
<ref id="B22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Newman</surname> <given-names>S. D.</given-names></name> <name><surname>Twieg</surname> <given-names>D.</given-names></name></person-group> (<year>2001</year>). <article-title>Differences in auditory processing of words and pseudowords: an fMRI study</article-title>. <source>Hum. Brain Mapp.</source> <volume>14</volume>, <fpage>39</fpage>&#x02013;<lpage>47</lpage>. <pub-id pub-id-type="doi">10.1002/hbm.1040</pub-id><pub-id pub-id-type="pmid">11500989</pub-id></citation></ref>
<ref id="B23"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Oldfield</surname> <given-names>R. C.</given-names></name></person-group> (<year>1971</year>). <article-title>The assessment and analysis of handedness: the Edinburgh inventory</article-title>. <source>Neuropsychologia</source> <volume>9</volume>, <fpage>97</fpage>&#x02013;<lpage>113</lpage>. <pub-id pub-id-type="doi">10.1016/0028-3932(71)90067-4</pub-id><pub-id pub-id-type="pmid">5146491</pub-id></citation></ref>
<ref id="B3300"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Perniss</surname> <given-names>P.</given-names></name> <name><surname>Thompson</surname> <given-names>R. L.</given-names></name> <name><surname>Vigliocco</surname> <given-names>G.</given-names></name></person-group> (<year>2010</year>). <article-title>Iconicity as a general property of language: evidence from spoken and signed languages</article-title>. <source>Front. Psychol.</source> <volume>1</volume>:<fpage>227</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2010.00227</pub-id><pub-id pub-id-type="pmid">21833282</pub-id></citation></ref>
<ref id="B24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Petitto</surname> <given-names>L. A.</given-names></name> <name><surname>Zatorre</surname> <given-names>R. J.</given-names></name> <name><surname>Gauna</surname> <given-names>K.</given-names></name> <name><surname>Nikelski</surname> <given-names>E. J.</given-names></name> <name><surname>Dostie</surname> <given-names>D.</given-names></name> <name><surname>Evans</surname> <given-names>A. C.</given-names></name></person-group> (<year>2000</year>). <article-title>Speech-like cerebral activity in profoundly deaf people processing signed languages: implications for the neural basis of human language</article-title>. <source>Proc. Natl. Acad. Sci. U S A</source> <volume>97</volume>, <fpage>13961</fpage>&#x02013;<lpage>13966</lpage>. <pub-id pub-id-type="doi">10.1073/pnas.97.25.13961</pub-id><pub-id pub-id-type="pmid">11106400</pub-id></citation></ref>
<ref id="B25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pitt</surname> <given-names>M. A.</given-names></name> <name><surname>Samuel</surname> <given-names>A. G.</given-names></name></person-group> (<year>1995</year>). <article-title>Lexical and sublexical feedback in auditory word recognition</article-title>. <source>Cogn. Psychol.</source> <volume>29</volume>, <fpage>149</fpage>&#x02013;<lpage>188</lpage>. <pub-id pub-id-type="doi">10.1006/cogp.1995.1014</pub-id><pub-id pub-id-type="pmid">7587137</pub-id></citation></ref>
<ref id="B26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>S&#x000F6;derfeldt</surname> <given-names>B.</given-names></name> <name><surname>Risberg</surname> <given-names>J.</given-names></name></person-group> (<year>2000</year>). <article-title>The cognitive neuroscience of signed language</article-title>. <source>Acta Psychol.</source> <volume>105</volume>, <fpage>237</fpage>&#x02013;<lpage>254</lpage>. <pub-id pub-id-type="doi">10.1016/s0001-6918(00)00063-9</pub-id><pub-id pub-id-type="pmid">11194414</pub-id></citation></ref>
<ref id="B27"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Sandler</surname> <given-names>W.</given-names></name> <name><surname>Lillo-Martin</surname> <given-names>D.</given-names></name></person-group> (<year>2006</year>). <source>Sign Language and Linguistic Universals.</source> <publisher-loc>Cambridge</publisher-loc>: <publisher-name>Cambridge University Press</publisher-name>.</citation></ref>
<ref id="B28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Scott</surname> <given-names>S. K.</given-names></name> <name><surname>Johnsrude</surname> <given-names>I. S.</given-names></name></person-group> (<year>2003</year>). <article-title>The neuroanatomical and functional organization of speech perception</article-title>. <source>Trends Neurosci.</source> <volume>26</volume>, <fpage>100</fpage>&#x02013;<lpage>107</lpage>. <pub-id pub-id-type="doi">10.1016/s0166-2236(02)00037-1</pub-id><pub-id pub-id-type="pmid">12536133</pub-id></citation></ref>
<ref id="B29"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>S&#x000F6;derfeldt</surname> <given-names>B.</given-names></name> <name><surname>Ingvar</surname> <given-names>M.</given-names></name> <name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>Eriksson</surname> <given-names>L.</given-names></name> <name><surname>Serrander</surname> <given-names>M.</given-names></name> <name><surname>Stone-Elander</surname> <given-names>S.</given-names></name></person-group> (<year>1997</year>). <article-title>Signed and spoken language perception studied by positron emission tomography</article-title>. <source>Neurology</source> <volume>49</volume>, <fpage>82</fpage>&#x02013;<lpage>87</lpage>. <pub-id pub-id-type="doi">10.1212/wnl.49.1.82</pub-id><pub-id pub-id-type="pmid">9222174</pub-id></citation></ref>
<ref id="B30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>S&#x000F6;derfeldt</surname> <given-names>B.</given-names></name> <name><surname>R&#x000F6;nnberg</surname> <given-names>J.</given-names></name> <name><surname>Risberg</surname> <given-names>J.</given-names></name></person-group> (<year>1994</year>). <article-title>Regional cerebral blood flow in sign language users</article-title>. <source>Brain Lang.</source> <volume>46</volume>, <fpage>59</fpage>&#x02013;<lpage>68</lpage>. <pub-id pub-id-type="doi">10.1006/brln.1994.1004</pub-id><pub-id pub-id-type="pmid">8131044</pub-id></citation></ref>
<ref id="B31"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Sutton-Spence</surname> <given-names>R.</given-names></name> <name><surname>Woll</surname> <given-names>B.</given-names></name></person-group> (<year>1999</year>). <source>The linguistics of British Sign Language: An introduction.</source> <publisher-loc>Cambridge</publisher-loc>: <publisher-name>Cambridge University Press</publisher-name>.</citation></ref>
<ref id="B32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Svartholm</surname> <given-names>K.</given-names></name></person-group> (<year>2010</year>). <article-title>Bilingual education for deaf children in Sweden</article-title>. <source>Int. J. Biling. Educ. Biling.</source> <volume>13</volume>, <fpage>159</fpage>&#x02013;<lpage>174</lpage>. <pub-id pub-id-type="doi">10.1080/13670050903474077</pub-id></citation></ref>
<ref id="B33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Thompson</surname> <given-names>R. L.</given-names></name> <name><surname>Vinson</surname> <given-names>D. P.</given-names></name> <name><surname>Vigliocco</surname> <given-names>G.</given-names></name></person-group> (<year>2010</year>). <article-title>The link between form and meaning in British Sign Language: effects of iconicity for phonological decisions</article-title>. <source>J. Exp. Psychol. Learn. Mem. Cogn.</source> <volume>36</volume>, <fpage>1017</fpage>&#x02013;<lpage>1027</lpage>. <pub-id pub-id-type="doi">10.1037/a0019339</pub-id><pub-id pub-id-type="pmid">20565217</pub-id></citation></ref>
<ref id="B34"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Tootell</surname> <given-names>R. B.</given-names></name> <name><surname>Reppas</surname> <given-names>J. B.</given-names></name> <name><surname>Kwong</surname> <given-names>K. K.</given-names></name> <name><surname>Malach</surname> <given-names>R.</given-names></name> <name><surname>Born</surname> <given-names>R. T.</given-names></name> <name><surname>Brady</surname> <given-names>T. J.</given-names></name> <etal/></person-group>. (<year>1995</year>). <article-title>Functional analysis of human MT and related visual cortical areas using magnetic resonance imaging</article-title>. <source>J. Neurosci.</source> <volume>15</volume>, <fpage>3215</fpage>&#x02013;<lpage>3230</lpage>. <pub-id pub-id-type="doi">10.1523/jneurosci.15-04-03215.1995</pub-id><pub-id pub-id-type="pmid">7722658</pub-id></citation></ref>
<ref id="B35"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ungerleider</surname> <given-names>L. G.</given-names></name> <name><surname>Haxby</surname> <given-names>J. V.</given-names></name></person-group> (<year>1994</year>). <article-title>&#x0201C;What&#x0201D; and &#x0201C;where&#x0201D; in the human brain</article-title>. <source>Curr. Opin. Neurobiol.</source> <volume>4</volume>, <fpage>157</fpage>&#x02013;<lpage>165</lpage>. <pub-id pub-id-type="doi">10.1016/0959-4388(94)90066-3</pub-id><pub-id pub-id-type="pmid">8038571</pub-id></citation></ref>
<ref id="B36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Vinson</surname> <given-names>D. P.</given-names></name> <name><surname>Cormier</surname> <given-names>K.</given-names></name> <name><surname>Denmark</surname> <given-names>T.</given-names></name> <name><surname>Schembri</surname> <given-names>A.</given-names></name> <name><surname>Vigliocco</surname> <given-names>G.</given-names></name></person-group> (<year>2008</year>). <article-title>The British Sign Language (BSL) norms for age of acquisition, familiarity and iconicity</article-title>. <source>Behav. Res. Methods</source> <volume>40</volume>, <fpage>1079</fpage>&#x02013;<lpage>1087</lpage>. <pub-id pub-id-type="doi">10.3758/brm.40.4.1079</pub-id><pub-id pub-id-type="pmid">19001399</pub-id></citation></ref>
<ref id="B37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Woll</surname> <given-names>B.</given-names></name></person-group> (<year>2014</year>). <article-title>Moving from hand to mouth: echo phonology and the origins of language</article-title>. <source>Front. Psychol.</source> <volume>5</volume>:<fpage>662</fpage>. <pub-id pub-id-type="doi">10.3389/fpsyg.2014.00662</pub-id><pub-id pub-id-type="pmid">25071636</pub-id></citation></ref>
<ref id="B38"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Xiao</surname> <given-names>Z.</given-names></name> <name><surname>Zhang</surname> <given-names>J. X.</given-names></name> <name><surname>Wang</surname> <given-names>X.</given-names></name> <name><surname>Wu</surname> <given-names>R.</given-names></name> <name><surname>Hu</surname> <given-names>X.</given-names></name> <name><surname>Weng</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2005</year>). <article-title>Differential activity in left inferior frontal gyrus for pseudowords and real words: an event-related fMRI study on auditory lexical decision</article-title>. <source>Hum. Brain Mapp.</source> <volume>25</volume>, <fpage>212</fpage>&#x02013;<lpage>221</lpage>. <pub-id pub-id-type="doi">10.1002/hbm.20105</pub-id><pub-id pub-id-type="pmid">15846769</pub-id></citation></ref>
</ref-list>
<fn-group>
<fn id="fn0001"><p><sup>1</sup>Some writing systems do exist (e.g., SignWriting), but these are not in general use in any sign language.</p></fn>
</fn-group>
</back>
</article>
