| { |
| "paper_id": "2020", |
| "header": { |
| "generated_with": "S2ORC 1.0.0", |
| "date_generated": "2023-01-19T15:22:13.447876Z" |
| }, |
| "title": "Towards Kurdish Text to Sign Translation", |
| "authors": [ |
| { |
| "first": "Zina", |
| "middle": [], |
| "last": "Kamal", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "University of Kurdistan Hewl\u00ear, Kurdistan Region - Iraq", |
| "location": { |
| "country": "Kurdistan Region - Iraq" |
| } |
| }, |
| "email": "z.kamal3@ukh.edu" |
| }, |
| { |
| "first": "Hossein", |
| "middle": [], |
| "last": "Hassani", |
| "suffix": "", |
| "affiliation": { |
| "laboratory": "", |
| "institution": "University of Kurdistan Hewl\u00ear, Kurdistan Region - Iraq", |
| "location": { |
| "country": "Kurdistan Region - Iraq" |
| } |
| }, |
| "email": "hosseinh@ukh.edu" |
| } |
| ], |
| "year": "", |
| "venue": null, |
| "identifiers": {}, |
| "abstract": "The resources and technologies for sign language processing of resourceful languages are emerging, while the low-resource languages are falling behind. Kurdish is a multi-dialect language, and it is considered a low-resource language. It is spoken by approximately 30 million people in several countries, which denotes that it has a large community with hearing-impairments as well. This paper reports on a project which aims to develop the necessary data and tools to process the sign language for Sorani as one of the spoken Kurdish dialects. We present the results of developing a dataset in HamNoSys and its corresponding SiGML form for the Kurdish Sign lexicon. We use this dataset to implement a sign-supported Kurdish tool to check the accuracy of the sign lexicon. We tested the tool by presenting it to hearing-impaired individuals. The experiment showed that 100% of the translated letters were understandable by a hearing-impaired person.", |
| "pdf_parse": { |
| "paper_id": "2020", |
| "_pdf_hash": "", |
| "abstract": [ |
| { |
| "text": "The resources and technologies for sign language processing of resourceful languages are emerging, while the low-resource languages are falling behind. Kurdish is a multi-dialect language, and it is considered a low-resource language. It is spoken by approximately 30 million people in several countries, which denotes that it has a large community with hearing-impairments as well. This paper reports on a project which aims to develop the necessary data and tools to process the sign language for Sorani as one of the spoken Kurdish dialects. We present the results of developing a dataset in HamNoSys and its corresponding SiGML form for the Kurdish Sign lexicon. We use this dataset to implement a sign-supported Kurdish tool to check the accuracy of the sign lexicon. We tested the tool by presenting it to hearing-impaired individuals. The experiment showed that 100% of the translated letters were understandable by a hearing-impaired person.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Abstract", |
| "sec_num": null |
| } |
| ], |
| "body_text": [ |
| { |
| "text": "The studies on sign language processing have been emerging, but many areas are still unexplored (Cormier et al., 2019) . As might be expected, this area of research has even not been initiated yet for many under-resourced languages. Kurdish, a multi-dialect language which is spoken by approximately 30 million people in different countries, is considered an under-resourced language (Hassani, 2018) . It is also written in different scripts. The usage of the scripts changes according to the geographical situation (Hassani and Medjedovic, 2016) . The current literature does not report on visible research on Kurdish Sign Language (KuSL) processing, nor are there any publicly available resources for this topic. This research focuses on text to sign conversion for the Sorani dialect of Kurdish. Sign language is the main communication method among the hearing-impaired community. This language is based on visual interaction rather than using sound. The interactions happen by manual and nonmanual signs and finger spelling (Cooper et al., 2011) . Hand and body movement, shape, orientation and location are within manual signs (Kelly et al., 2009) , while facial expressions, eye gaze, and shoulder movement are called non-manual signs (Halawani, 2008) . Furthermore, the finger spelling is used to spell letters of certain words, for example, names and technical terms that do not have sign equivalents (Liwicki and Everingham, 2009) . Normally, the communication between two hearingimpaired persons is smooth and understandable. The real challenge begins when a hearing person wants to interact with a hearing-impaired person (Wazalwar and Shrawankar, 2017) . Generally, if the target hearing-impaired person is educated, they try to communicate by exchanging written texts. Otherwise, they turn to a human sign language interpreter as a recourse if available, or else perhaps they end up with serious miscommunication (Wazalwar and Shrawankar, 2017). \nAlthough the spoken Kurdish dialects use different lexicons (Ahmadi et al., 2019) , the Kurdish Sign language, which is used in the Kurdistan Region of Iraq (KRI), uses the same lexicon among the hearing-impaired community regardless of the spoken dialect. While according to Jepsen et al.", |
| "cite_spans": [ |
| { |
| "start": 96, |
| "end": 118, |
| "text": "(Cormier et al., 2019)", |
| "ref_id": "BIBREF5" |
| }, |
| { |
| "start": 384, |
| "end": 399, |
| "text": "(Hassani, 2018)", |
| "ref_id": "BIBREF14" |
| }, |
| { |
| "start": 516, |
| "end": 546, |
| "text": "(Hassani and Medjedovic, 2016)", |
| "ref_id": "BIBREF13" |
| }, |
| { |
| "start": 1028, |
| "end": 1049, |
| "text": "(Cooper et al., 2011)", |
| "ref_id": "BIBREF4" |
| }, |
| { |
| "start": 1132, |
| "end": 1152, |
| "text": "(Kelly et al., 2009)", |
| "ref_id": "BIBREF16" |
| }, |
| { |
| "start": 1241, |
| "end": 1257, |
| "text": "(Halawani, 2008)", |
| "ref_id": "BIBREF9" |
| }, |
| { |
| "start": 1409, |
| "end": 1439, |
| "text": "(Liwicki and Everingham, 2009)", |
| "ref_id": "BIBREF18" |
| }, |
| { |
| "start": 1633, |
| "end": 1664, |
| "text": "(Wazalwar and Shrawankar, 2017)", |
| "ref_id": "BIBREF24" |
| }, |
| { |
| "start": 2019, |
| "end": 2040, |
| "text": "(Ahmadi et al., 2019)", |
| "ref_id": "BIBREF0" |
| } |
| ], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Introduction", |
| "sec_num": "1." |
| }, |
| { |
| "text": "(2015) KuSL is not standardized, applying guidelines by Mohammed (2007) and using the Kurdish Sign dictionaries (Nashat Salim et al., 2013; Ghazi Dizayee, 2000) in the KRI education programs show some efforts towards KuSL standardization. We develop a Kurdish Sign lexicon using the Kurdish Sign Language Dictionary (KuSLD) (Ghazi Dizayee, 2000) , which is used in KRI. Currently, no Kurdish Sign corpus is available, hence we aim at making Sorani texts sign-supported. That is, in the text conversion process we follow the spoken language and not the sign language structure. Sorani texts are mostly written in Persian-Arabic script (Hassani, 2018) hence we use the developed Kurdish Sign lexicon to make this type of the Sorani texts sign-supported. The rest of this paper is organized as follows. Section 2. provides a brief background on sign language processing, Section 3. reviews the related work, Section 4. presents our approach, Section 5. illustrates the developed dataset, Section 6. discusses the results, finally, Section 7. concludes the paper.", |
| "cite_spans": [ |
| { |
| "start": 56, |
| "end": 71, |
| "text": "Mohammed (2007)", |
| "ref_id": "BIBREF20" |
| }, |
| { |
| "start": 112, |
| "end": 139, |
| "text": "(Nashat Salim et al., 2013;", |
| "ref_id": "BIBREF27" |
| }, |
| { |
| "start": 140, |
| "end": 160, |
| "text": "Ghazi Dizayee, 2000)", |
| "ref_id": "BIBREF26" |
| }, |
| { |
| "start": 324, |
| "end": 345, |
| "text": "(Ghazi Dizayee, 2000)", |
| "ref_id": "BIBREF26" |
| } |
| ], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Introduction", |
| "sec_num": "1." |
| }, |
| { |
| "text": "Sign languages are considered as genuine languages that place them among the minority languages (Senghas and Monaghan, 2002). Since sign languages consist of visual gestures rather than voice as it is in spoken languages. The analysis and feature extraction of the former significantly differ from latter languages. However, for some languages, a variant of sign language also exists that follows the spoken/written language grammar, which is called sign-supported language (Elliott et al., 2008) . The development of this variant is less challenging in the absence of required sign corpora and language models. The outcome could be used in various experimental and real-life occasions. Several approaches exist to process sign languages. In the following sections, we discuss those approaches which are more related to our current stage of research.", |
| "cite_spans": [ |
| { |
| "start": 474, |
| "end": 496, |
| "text": "(Elliott et al., 2008)", |
| "ref_id": "BIBREF8" |
| } |
| ], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Sign Language Processing", |
| "sec_num": "2." |
| }, |
| { |
| "text": "The sign visual gestures are normally denoted by special notations in order to be able to process them. Different notation systems are used to capture these gestures. The most popular ones are Stokoe, SignWriting, and HamNoSys. Stokoe was one of the earliest attempts for a sign language notation system (McCarty, 2004) . However, it was only concerned with manual sign representation, and it lacked any consideration for non-manual signs, such as eye gaze and shoulders movements, which are an essential entity to convey meaning by facial expression. SignWriting represents the signed gestures spatially in a 2D canvas (Bouzid and Jemni, 2013b) . It is designed to facilitate communication among the hearingimpaired community. HamNoSys (Hamburg Notation System) is a phonetic translation system with iconicity, extensibility, and formal syntax characteristics used to denote sign languages (Hanke, 2004) . A comparative analysis by (Dhanjal and Singh, 2019) concluded that HamNoSys is the most widely used notation system for a variety of sign languages. Ham-NoSys symbols are available as a Unicode font (Hanke, 2004) . This Unicode font symbolizes manual sign gestures and allows the generation of the signs by dividing the description into the handshapes, orientations, locations, and actions.", |
| "cite_spans": [ |
| { |
| "start": 304, |
| "end": 319, |
| "text": "(McCarty, 2004)", |
| "ref_id": "BIBREF19" |
| }, |
| { |
| "start": 620, |
| "end": 645, |
| "text": "(Bouzid and Jemni, 2013b)", |
| "ref_id": "BIBREF3" |
| }, |
| { |
| "start": 891, |
| "end": 904, |
| "text": "(Hanke, 2004)", |
| "ref_id": "BIBREF11" |
| }, |
| { |
| "start": 933, |
| "end": 958, |
| "text": "(Dhanjal and Singh, 2019)", |
| "ref_id": "BIBREF7" |
| }, |
| { |
| "start": 1106, |
| "end": 1119, |
| "text": "(Hanke, 2004)", |
| "ref_id": "BIBREF11" |
| } |
| ], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Notation Systems", |
| "sec_num": "2.1." |
| }, |
| { |
| "text": "To provide computer encoding for sign languages and to make their processing more efficient, several adoptions of the Extensible Markup Language (XML) have been suggested based on various sign notation systems. The Sign Writing Markup Language (SWML) is a markup language proposed by da Rocha Costa and Dimuro (2001) based on SignWriting. HamNoSys uses Signing Gesture Markup Language (SiGML), which gives a special XML tag to each HamNoSys symbol. These markup languages are used in different applications, for instance, to be given to a 3D avatar to animate the signs.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Markup Languages", |
| "sec_num": "2.2." |
| }, |
| { |
| "text": "The only work on Kurdish Sign language processing that we were able to retrieve was by Hashim and Alizadeh (2018) wherein the researchers reported on their project on Kurdish Sign language recognition. That project focused on the recognition of Kurdish manual alphabets. Therefore, as literature does not report on active studies on Kurdish Sign language processing, we review the topic in the context of other languages. Sugandhi and Kaur (2018) introduced an online multilingual dictionary for avatar-based Indian Sign language. The system is designed to accept input from two languages English and Hindi. The input is transliterated into Hindi and then goes through the parser to be translated into Indian Sign Language (ISL). After extracting the root words of the input script, the target Hamburg Notations are retrieved from the database and converted into its corresponding SiGML. The generated SiGML is the input parameter for the Animation server, which uses Web Graphics Library (We-bGL) for the avatar representation. Aouiti (2013) proposed an approach to convert Arabic text into Arabic Sign language. The approach used an Arabic sentence/Sign language corpus as a core entity. The corpus includes Arabic sentences that were aligned with their corresponding sign representation. This helped to ensure that the represented sign refers to the real meaning of the input text. Afterward, the target sentence was syntactically and semantically analyzed by applying techniques, such as Morphological, Syntactic, Semantic, and Pragmatic analysis, which led to the generation of the glosses. The sign for each gloss was extracted from the corpus, which was sent to the avatar to be played. Bouzid and Jemni (2013a) developed an avatar-based system to enhance the usability and readability of notation systems for deaf people. The system was developed using SignWriting (SW) notation and its markup language. \nTheir focus was to make the path easier for hearing-impaired people to understand and represent signs in a written format. Since SW is presented in a 2D format and it is easy to guess the target gestures from the written notations, this helps hearing-impaired people to learn different sign languages depending on the SW notations. SW is designed for daily communication purposes rather than linguistic and corpus development and processing. An automated reading system for SignWriting representation of Brazilian Sign language was introduced by Stiehl et al. (2015) . They focused on SignWriting of several Brazilian signs and classified the symbols into several categories. Again, their purpose was to build a database of SignWriting representation for Brazilian Sign Language in order to involve hearing-impaired people into learning the notations and enable them to communicate with each other. This approach can also be used to have books, newspapers, dictionaries and such that are written in notation symbols and can be understood by hearing-impaired people or sign learn-ers. To summarize, we follow the approach of Sugandhi and Kaur (2018) because of two reasons. First, because HamNoSys is a proper method for corpus development, and second because SignWriting is majorly used for the communication among the hearing-impaired community and not between the hearing-impaired community and people with no hearing difficulties.", |
| "cite_spans": [ |
| { |
| "start": 87, |
| "end": 113, |
| "text": "Hashim and Alizadeh (2018)", |
| "ref_id": "BIBREF12" |
| }, |
| { |
| "start": 422, |
| "end": 446, |
| "text": "Sugandhi and Kaur (2018)", |
| "ref_id": "BIBREF23" |
| }, |
| { |
| "start": 1029, |
| "end": 1042, |
| "text": "Aouiti (2013)", |
| "ref_id": "BIBREF1" |
| }, |
| { |
| "start": 1694, |
| "end": 1718, |
| "text": "Bouzid and Jemni (2013a)", |
| "ref_id": "BIBREF2" |
| }, |
| { |
| "start": 2458, |
| "end": 2478, |
| "text": "Stiehl et al. (2015)", |
| "ref_id": "BIBREF22" |
| }, |
| { |
| "start": 3036, |
| "end": 3060, |
| "text": "Sugandhi and Kaur (2018)", |
| "ref_id": "BIBREF23" |
| } |
| ], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Related Work", |
| "sec_num": "3." |
| }, |
| { |
| "text": "We develop a dataset based on KuSLD. We also prepare and adapt a tool to translate Sorani texts into Kurdish Sign language to be animated by an avatar. We prepare the HamNoSys notations manually by analyzing the gestures available in KuSLD. We use the Ham2HPSG and eSIGN (Hanke and Popescu, 2003) to create the dataset and extract the SiGML codes. We implement a sign-supported tool based on the architecture that is shown in Figure 1 . In this architecture, the Language Model (LM), in its current form, is the developed dataset, which could be considered as the Kurdish (Sorani) sign lexicon. The input text goes through the tokenization process to extract the meaningful components from it. Similar to the existing sign-supported tools for other languages, the translation is word-by-word for the words that are found in the LM. Otherwise, the word will be replaced by a sequence of its letters in the sign language. Then the whole text is compiled into SiGML files, which will be sent to an avatar to be animated. We evaluate the tool by feeding it with input of four categories, namely alphabets, numbers, words, and sentences. The tool then plays the translation to the human individuals who are either hearing-impaired or Kurdish Sign language educators. As subjective understanding is not accurate (Kipp et al., 2011) , we ask the testers to write down their understanding. We calculate the accuracy by the percentage of correctly understood cases for played alphabets, numbers, words, and sentences. ", |
| "cite_spans": [ |
| { |
| "start": 271, |
| "end": 296, |
| "text": "(Hanke and Popescu, 2003)", |
| "ref_id": null |
| }, |
| { |
| "start": 1306, |
| "end": 1325, |
| "text": "(Kipp et al., 2011)", |
| "ref_id": "BIBREF17" |
| } |
| ], |
| "ref_spans": [ |
| { |
| "start": 426, |
| "end": 434, |
| "text": "Figure 1", |
| "ref_id": "FIGREF0" |
| } |
| ], |
| "eq_spans": [], |
| "section": "Method", |
| "sec_num": "4." |
| }, |
| { |
| "text": "The KuSLD consists of 2315 different sign gestures from 38 different categories. Our dataset, currently, consists of 20% of each category. However, we converted the alphabet and numbers completely. This adds more entries to the dataset, which sums up to approximately 560 entries. The KuSLD categories are listed in Table 1 . A sample for the prepared HamNoSys for Kurdish letters and words is shown in Figures 2 and 3 . We extracted the generated SiGML for the corresponding HamNoSys dataset, which was sent to an avatar to be animated. Two samples of extracted SiGML for the letter \u202b\"\u0628\"\u202c (B) and the word \u202b\"\u0632\u0627\ufee7\ufb91\ufbda\"\u202c (University) in Kurdish are shown in Figures 4 and 5.", |
| "cite_spans": [], |
| "ref_spans": [ |
| { |
| "start": 316, |
| "end": 323, |
| "text": "Table 1", |
| "ref_id": null |
| }, |
| { |
| "start": 403, |
| "end": 418, |
| "text": "Figures 2 and 3", |
| "ref_id": "FIGREF1" |
| } |
| ], |
| "eq_spans": [], |
| "section": "Developed Dataset", |
| "sec_num": "5." |
| }, |
| { |
| "text": "We played a sample of letters of the prepared dataset to the hearing-impaired individuals. The test showed a 100% understanding of the test data. The results of playing words showed a 65% correct understanding of the played words. The accuracy of the tool for understanding sentences was approximately 30%. In the evaluation process, the person could recognize all shown letters successfully since they are clearly shown in the dictionary. On the other hand, the signs for the words had a lower evaluation outcome. The person could not understand some of the words.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Findings and Discussion", |
| "sec_num": "6." |
| }, |
| { |
| "text": "One reason for this was the usage of two different sign dictionaries in KRI. One of these dictionaries represents all signs based on the lexicon description, while the other (Ghazi Dizayee, 2000) uses vocal description for some of its entries. Our dataset was developed based on the latter. Both dictionaries are used interchangeably, but they provide different representations for specific signs depending on the context where they appear. This issue also affected sentence evaluation. Also, since we used a word by word translation, the hearing-impaired person was unable to understand. Therefore, the sentence evaluation achieved low accuracy, which is typical for the sign-supported systems.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Findings and Discussion", |
| "sec_num": "6." |
| }, |
| { |
| "text": "We used HamNoSys to develop a sign dataset and its equivalent SiGML for Kurdish. We chose HamNoSys over SignWriting because of our plan to develop Kurdish Sign corpora in the future. Our developed dataset includes approximately 560 entries consisting of the alphabet, numbers, and words. We also implemented a tool to translate Sorani texts into the Kurdish Sign language, which could be animated by an avatar. We evaluated the tool by showing the animated output to hearing-impaired persons on the three aspects of understanding the sign gestures, namely letters, words, and sentences. The test showed a 100% understanding for the letters, a 65% for isolated words, and approximately 30% for sentences. The main reasons for the low accuracy were the usage of more than one sign dictionary in the target community and the word-by-word translation of the input texts.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Conclusion", |
| "sec_num": "7." |
| }, |
| { |
| "text": "As future work, we are targeting the development of a language model based on the grammar of the Kurdish Sign language. Additionally, we aim to add more entries to the developed dataset. Furthermore, we would like to include other Kurdish dialects in the dataset.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Conclusion", |
| "sec_num": "7." |
| }, |
| { |
| "text": "We would like to appreciate the assistance of the Erbil Hearing-impairment Association for their assistance to access the Kurdish Sign dictionaries. Also, we acknowledge the help of Hiwa Center for Deaf and Mute in Erbil, particularly Ms. Mehan Fatah and Ms. Shno Aziz, for providing us with Kurdish Sign language resources and for their help in the evaluation process. Furthermore, we appreciate the constructive feedback we received from anonymous reviewers, which has helped us to improve the quality of the paper.", |
| "cite_spans": [], |
| "ref_spans": [], |
| "eq_spans": [], |
| "section": "Acknowledgements", |
| "sec_num": "8." |
| } |
| ], |
| "back_matter": [], |
| "bib_entries": { |
| "BIBREF0": { |
| "ref_id": "b0", |
| "title": "Towards Electronic Lexicography for the Kurdish language", |
| "authors": [ |
| { |
| "first": "S", |
| "middle": [], |
| "last": "Ahmadi", |
| "suffix": "" |
| }, |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Hassani", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [ |
| "P" |
| ], |
| "last": "Mccrae", |
| "suffix": "" |
| } |
| ], |
| "year": 2019, |
| "venue": "Proceedings of the sixth biennial conference on electronic lexicography (eLex). eLex", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Ahmadi, S., Hassani, H., and McCrae, J. P. (2019). Towards Electronic Lexicography for the Kurdish language. In Proceedings of the sixth biennial con- ference on electronic lexicography (eLex). eLex 2019.", |
| "links": null |
| }, |
| "BIBREF1": { |
| "ref_id": "b1", |
| "title": "Towards an automatic translation from arabic text to sign language", |
| "authors": [ |
| { |
| "first": "N", |
| "middle": [], |
| "last": "Aouiti", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "Fourth International Conference on Information and Communication Technology and Accessibility (ICTA)", |
| "volume": "", |
| "issue": "", |
| "pages": "1--4", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Aouiti, N. (2013). Towards an automatic translation from arabic text to sign language. In Fourth Inter- national Conference on Information and Communi- cation Technology and Accessibility (ICTA), pages 1-4. IEEE.", |
| "links": null |
| }, |
| "BIBREF2": { |
| "ref_id": "b2", |
| "title": "An animated avatar to interpret SignWriting transcription", |
| "authors": [ |
| { |
| "first": "Y", |
| "middle": [], |
| "last": "Bouzid", |
| "suffix": "" |
| }, |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Jemni", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "2013 International Conference on Electrical Engineering and Software Applications", |
| "volume": "", |
| "issue": "", |
| "pages": "1--5", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Bouzid, Y. and Jemni, M. (2013a). An animated avatar to interpret SignWriting transcription. In 2013 International Conference on Electrical Engi- neering and Software Applications, pages 1-5. IEEE.", |
| "links": null |
| }, |
| "BIBREF3": { |
| "ref_id": "b3", |
| "title": "An Avatar based approach for automatically interpreting a sign language notation", |
| "authors": [ |
| { |
| "first": "Y", |
| "middle": [], |
| "last": "Bouzid", |
| "suffix": "" |
| }, |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Jemni", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "2013 IEEE 13th International Conference on Advanced Learning Technologies", |
| "volume": "", |
| "issue": "", |
| "pages": "92--94", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Bouzid, Y. and Jemni, M. (2013b). An Avatar based approach for automatically interpreting a sign lan- guage notation. In 2013 IEEE 13th International Conference on Advanced Learning Technologies, pages 92-94. IEEE.", |
| "links": null |
| }, |
| "BIBREF4": { |
| "ref_id": "b4", |
| "title": "Sign language recognition", |
| "authors": [ |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Cooper", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Holt", |
| "suffix": "" |
| }, |
| { |
| "first": "R", |
| "middle": [], |
| "last": "Bowden", |
| "suffix": "" |
| } |
| ], |
| "year": 2011, |
| "venue": "Visual Analysis of Humans", |
| "volume": "", |
| "issue": "", |
| "pages": "539--562", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Cooper, H., Holt, B., and Bowden, R. (2011). Sign language recognition. In Visual Analysis of Humans, pages 539-562. Springer.", |
| "links": null |
| }, |
| "BIBREF5": { |
| "ref_id": "b5", |
| "title": "ExTOL: Automatic recognition of British Sign Language using the BSL Corpus", |
| "authors": [ |
| { |
| "first": "K", |
| "middle": [], |
| "last": "Cormier", |
| "suffix": "" |
| }, |
| { |
| "first": "N", |
| "middle": [], |
| "last": "Fox", |
| "suffix": "" |
| }, |
| { |
| "first": "B", |
| "middle": [], |
| "last": "Woll", |
| "suffix": "" |
| }, |
| { |
| "first": "A", |
| "middle": [], |
| "last": "Zisserman", |
| "suffix": "" |
| }, |
| { |
| "first": "N", |
| "middle": [ |
| "C" |
| ], |
| "last": "Camg\u00f6z", |
| "suffix": "" |
| }, |
| { |
| "first": "R", |
| "middle": [], |
| "last": "Bowden", |
| "suffix": "" |
| } |
| ], |
| "year": 2019, |
| "venue": "Proceedings of 6th Workshop on Sign Language Translation and Avatar Technology (SLTAT)", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Cormier, K., Fox, N., Woll, B., Zisserman, A., Camg\u00f6z, N. C., and Bowden, R. (2019). ExTOL: Automatic recognition of British Sign Language us- ing the BSL Corpus. In Proceedings of 6th Workshop on Sign Language Translation and Avatar Technol- ogy (SLTAT) 2019. Universitat Hamburg.", |
| "links": null |
| }, |
| "BIBREF6": { |
| "ref_id": "b6", |
| "title": "SignWriting-Based Sign Language Processing", |
| "authors": [ |
| { |
| "first": "A", |
| "middle": [ |
| "C" |
| ], |
| "last": "Da Rocha Costa", |
| "suffix": "" |
| }, |
| { |
| "first": "G", |
| "middle": [ |
| "P" |
| ], |
| "last": "Dimuro", |
| "suffix": "" |
| } |
| ], |
| "year": 2001, |
| "venue": "International Gesture Workshop", |
| "volume": "", |
| "issue": "", |
| "pages": "202--205", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "da Rocha Costa, A. C. and Dimuro, G. P. (2001). SignWriting-Based Sign Language Processing. In International Gesture Workshop, pages 202-205. Springer.", |
| "links": null |
| }, |
| "BIBREF7": { |
| "ref_id": "b7", |
| "title": "Comparative Analysis of Sign Language Notation Systems for Indian Sign Language", |
| "authors": [ |
| { |
| "first": "A", |
| "middle": [ |
| "S" |
| ], |
| "last": "Dhanjal", |
| "suffix": "" |
| }, |
| { |
| "first": "W", |
| "middle": [], |
| "last": "Singh", |
| "suffix": "" |
| } |
| ], |
| "year": 2019, |
| "venue": "2019 Second International Conference on Advanced Computational and Communication Paradigms (ICACCP)", |
| "volume": "", |
| "issue": "", |
| "pages": "1--6", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Dhanjal, A. S. and Singh, W. (2019). Comparative Analysis of Sign Language Notation Systems for In- dian Sign Language. In 2019 Second International Conference on Advanced Computational and Com- munication Paradigms (ICACCP), pages 1-6. IEEE.", |
| "links": null |
| }, |
| "BIBREF8": { |
| "ref_id": "b8", |
| "title": "Linguistic modelling and language-processing technologies for Avatar-based sign language presentation", |
| "authors": [ |
| { |
| "first": "R", |
| "middle": [], |
| "last": "Elliott", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [ |
| "R" |
| ], |
| "last": "Glauert", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [], |
| "last": "Kennaway", |
| "suffix": "" |
| }, |
| { |
| "first": "I", |
| "middle": [], |
| "last": "Marshall", |
| "suffix": "" |
| }, |
| { |
| "first": "E", |
| "middle": [], |
| "last": "Safar", |
| "suffix": "" |
| } |
| ], |
| "year": 2008, |
| "venue": "Universal Access in the Information Society", |
| "volume": "6", |
| "issue": "4", |
| "pages": "375--391", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Elliott, R., Glauert, J. R., Kennaway, J., Marshall, I., and Safar, E. (2008). Linguistic modelling and language-processing technologies for Avatar-based sign language presentation. Universal Access in the Information Society, 6(4):375-391.", |
| "links": null |
| }, |
| "BIBREF9": { |
| "ref_id": "b9", |
| "title": "Arabic sign language translation system on mobile devices", |
| "authors": [ |
| { |
| "first": "S", |
| "middle": [ |
| "M" |
| ], |
| "last": "Halawani", |
| "suffix": "" |
| } |
| ], |
| "year": 2008, |
| "venue": "IJCSNS International Journal of Computer Science and Network Security", |
| "volume": "8", |
| "issue": "1", |
| "pages": "251--256", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Halawani, S. M. (2008). Arabic sign language trans- lation system on mobile devices. IJCSNS Interna- tional Journal of Computer Science and Network Security, 8(1):251-256.", |
| "links": null |
| }, |
| "BIBREF11": { |
| "ref_id": "b11", |
| "title": "HamNoSys-representing sign language data in language resources and language processing contexts", |
| "authors": [ |
| { |
| "first": "T", |
| "middle": [], |
| "last": "Hanke", |
| "suffix": "" |
| } |
| ], |
| "year": 2004, |
| "venue": "LREC", |
| "volume": "4", |
| "issue": "", |
| "pages": "1--6", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Hanke, T. (2004). HamNoSys-representing sign lan- guage data in language resources and language pro- cessing contexts. In LREC, volume 4, pages 1-6.", |
| "links": null |
| }, |
| "BIBREF12": { |
| "ref_id": "b12", |
| "title": "Kurdish Sign Language Recognition System", |
| "authors": [ |
| { |
| "first": "A", |
| "middle": [ |
| "D" |
| ], |
| "last": "Hashim", |
| "suffix": "" |
| }, |
| { |
| "first": "F", |
| "middle": [], |
| "last": "Alizadeh", |
| "suffix": "" |
| } |
| ], |
| "year": 2018, |
| "venue": "UKH Journal of Science and Engineering", |
| "volume": "2", |
| "issue": "1", |
| "pages": "1--6", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Hashim, A. D. and Alizadeh, F. (2018). Kurdish Sign Language Recognition System. UKH Journal of Sci- ence and Engineering, 2(1):1-6.", |
| "links": null |
| }, |
| "BIBREF13": { |
| "ref_id": "b13", |
| "title": "Automatic Kurdish dialects identification", |
| "authors": [ |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Hassani", |
| "suffix": "" |
| }, |
| { |
| "first": "D", |
| "middle": [], |
| "last": "Medjedovic", |
| "suffix": "" |
| } |
| ], |
| "year": 2016, |
| "venue": "Computer Science & Information Technology", |
| "volume": "6", |
| "issue": "2", |
| "pages": "61--78", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Hassani, H. and Medjedovic, D. (2016). Automatic Kurdish dialects identification. Computer Science & Information Technology, 6(2):61-78.", |
| "links": null |
| }, |
| "BIBREF14": { |
| "ref_id": "b14", |
| "title": "BLARK for multi-dialect languages: towards the Kurdish BLARK. Language Resources and Evaluation", |
| "authors": [ |
| { |
| "first": "H", |
| "middle": [], |
| "last": "Hassani", |
| "suffix": "" |
| } |
| ], |
| "year": 2018, |
| "venue": "", |
| "volume": "52", |
| "issue": "", |
| "pages": "625--644", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Hassani, H. (2018). BLARK for multi-dialect lan- guages: towards the Kurdish BLARK. Language Resources and Evaluation, 52(2):625-644.", |
| "links": null |
| }, |
| "BIBREF15": { |
| "ref_id": "b15", |
| "title": "Sign languages of the world: A comparative handbook", |
| "authors": [ |
| { |
| "first": "J", |
| "middle": [ |
| "B" |
| ], |
| "last": "Jepsen", |
| "suffix": "" |
| }, |
| { |
| "first": "G", |
| "middle": [], |
| "last": "De Clerck", |
| "suffix": "" |
| }, |
| { |
| "first": "S", |
| "middle": [], |
| "last": "Lutalo-Kiingi", |
| "suffix": "" |
| }, |
| { |
| "first": "W", |
| "middle": [ |
| "B" |
| ], |
| "last": "Mc-Gregor", |
| "suffix": "" |
| } |
| ], |
| "year": 2015, |
| "venue": "", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Jepsen, J. B., De Clerck, G., Lutalo-Kiingi, S., and Mc- Gregor, W. B. (2015). Sign languages of the world: A comparative handbook. De Gruyter.", |
| "links": null |
| }, |
| "BIBREF16": { |
| "ref_id": "b16", |
| "title": "A framework for continuous multimodal sign language recognition", |
| "authors": [ |
| { |
| "first": "D", |
| "middle": [], |
| "last": "Kelly", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [], |
| "last": "Reilly Delannoy", |
| "suffix": "" |
| }, |
| { |
| "first": "J", |
| "middle": [], |
| "last": "Mc Donald", |
| "suffix": "" |
| }, |
| { |
| "first": "C", |
| "middle": [], |
| "last": "Markham", |
| "suffix": "" |
| } |
| ], |
| "year": 2009, |
| "venue": "Proceedings of the 2009 international conference on Multimodal interfaces", |
| "volume": "", |
| "issue": "", |
| "pages": "351--358", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Kelly, D., Reilly Delannoy, J., Mc Donald, J., and Markham, C. (2009). A framework for continuous multimodal sign language recognition. In Proceed- ings of the 2009 international conference on Multi- modal interfaces, pages 351-358.", |
| "links": null |
| }, |
| "BIBREF17": { |
| "ref_id": "b17", |
| "title": "Sign language avatars: Animation and comprehensibility", |
| "authors": [ |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Kipp", |
| "suffix": "" |
| }, |
| { |
| "first": "A", |
| "middle": [], |
| "last": "Heloir", |
| "suffix": "" |
| }, |
| { |
| "first": "Q", |
| "middle": [], |
| "last": "Nguyen", |
| "suffix": "" |
| } |
| ], |
| "year": 2011, |
| "venue": "International Workshop on Intelligent Virtual Agents", |
| "volume": "", |
| "issue": "", |
| "pages": "113--126", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Kipp, M., Heloir, A., and Nguyen, Q. (2011). Sign language avatars: Animation and comprehensibil- ity. In International Workshop on Intelligent Virtual Agents, pages 113-126. Springer.", |
| "links": null |
| }, |
| "BIBREF18": { |
| "ref_id": "b18", |
| "title": "Automatic recognition of fingerspelled words in British Sign Language", |
| "authors": [ |
| { |
| "first": "S", |
| "middle": [], |
| "last": "Liwicki", |
| "suffix": "" |
| }, |
| { |
| "first": "M", |
| "middle": [], |
| "last": "Everingham", |
| "suffix": "" |
| } |
| ], |
| "year": 2009, |
| "venue": "IEEE computer society conference on computer vision and pattern recognition workshops", |
| "volume": "", |
| "issue": "", |
| "pages": "50--57", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Liwicki, S. and Everingham, M. (2009). Automatic recognition of fingerspelled words in British Sign Language. In 2009 IEEE computer society con- ference on computer vision and pattern recognition workshops, pages 50-57. IEEE.", |
| "links": null |
| }, |
| "BIBREF19": { |
| "ref_id": "b19", |
| "title": "Notation systems for reading and writing sign language. The Analysis of verbal behavior", |
| "authors": [ |
| { |
| "first": "A", |
| "middle": [ |
| "L" |
| ], |
| "last": "Mccarty", |
| "suffix": "" |
| } |
| ], |
| "year": 2004, |
| "venue": "", |
| "volume": "20", |
| "issue": "", |
| "pages": "129--134", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "McCarty, A. L. (2004). Notation systems for reading and writing sign language. The Analysis of verbal behavior, 20(1):129-134.", |
| "links": null |
| }, |
| "BIBREF20": { |
| "ref_id": "b20", |
| "title": "The Sign Language for Deaf", |
| "authors": [ |
| { |
| "first": "M", |
| "middle": [ |
| "F" |
| ], |
| "last": "Mohammed", |
| "suffix": "" |
| } |
| ], |
| "year": 2007, |
| "venue": "Ministry of Labor and Social Affairs", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Mohammed, M. F. (2007). The Sign Language for Deaf [In Arabic]. Ministry of Labor and Social Af- fairs, Iraq.", |
| "links": null |
| }, |
| "BIBREF21": { |
| "ref_id": "b21", |
| "title": "Signs of their times: Deaf communities and the culture of language", |
| "authors": [ |
| { |
| "first": "R", |
| "middle": [ |
| "J" |
| ], |
| "last": "Senghas", |
| "suffix": "" |
| }, |
| { |
| "first": "L", |
| "middle": [], |
| "last": "Monaghan", |
| "suffix": "" |
| } |
| ], |
| "year": 2002, |
| "venue": "Annual Review of Anthropology", |
| "volume": "31", |
| "issue": "1", |
| "pages": "69--97", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Senghas, R. J. and Monaghan, L. (2002). Signs of their times: Deaf communities and the culture of language. Annual Review of Anthropology, 31(1):69- 97.", |
| "links": null |
| }, |
| "BIBREF22": { |
| "ref_id": "b22", |
| "title": "Towards a SignWriting recognition system", |
| "authors": [ |
| { |
| "first": "D", |
| "middle": [], |
| "last": "Stiehl", |
| "suffix": "" |
| }, |
| { |
| "first": "L", |
| "middle": [], |
| "last": "Addams", |
| "suffix": "" |
| }, |
| { |
| "first": "L", |
| "middle": [ |
| "S" |
| ], |
| "last": "Oliveira", |
| "suffix": "" |
| }, |
| { |
| "first": "C", |
| "middle": [], |
| "last": "Guimar\u00e3es", |
| "suffix": "" |
| }, |
| { |
| "first": "A", |
| "middle": [], |
| "last": "Britto", |
| "suffix": "" |
| } |
| ], |
| "year": 2015, |
| "venue": "2015 13th International Conference on Document Analysis and Recognition (IC-DAR)", |
| "volume": "", |
| "issue": "", |
| "pages": "26--30", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Stiehl, D., Addams, L., Oliveira, L. S., Guimar\u00e3es, C., and Britto, A. (2015). Towards a SignWriting recognition system. In 2015 13th International Con- ference on Document Analysis and Recognition (IC- DAR), pages 26-30. IEEE.", |
| "links": null |
| }, |
| "BIBREF23": { |
| "ref_id": "b23", |
| "title": "Online Multilinguial Dictionary Using Hamburg Notaiton for Avatar-Based Indian Sign Language Generation System", |
| "authors": [ |
| { |
| "first": "P", |
| "middle": [ |
| "K" |
| ], |
| "last": "Sugandhi", |
| "suffix": "" |
| }, |
| { |
| "first": "S", |
| "middle": [], |
| "last": "Kaur", |
| "suffix": "" |
| } |
| ], |
| "year": 2018, |
| "venue": "Int. J. Cogn. Lang. Sci", |
| "volume": "12", |
| "issue": "8", |
| "pages": "1116--1122", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Sugandhi, P. K. and Kaur, S. (2018). Online Mul- tilinguial Dictionary Using Hamburg Notaiton for Avatar-Based Indian Sign Language Generation Sys- tem. Int. J. Cogn. Lang. Sci., 12(8):1116-1122.", |
| "links": null |
| }, |
| "BIBREF24": { |
| "ref_id": "b24", |
| "title": "Interpretation of sign language into English using NLP techniques", |
| "authors": [ |
| { |
| "first": "S", |
| "middle": [ |
| "S" |
| ], |
| "last": "Wazalwar", |
| "suffix": "" |
| }, |
| { |
| "first": "U", |
| "middle": [], |
| "last": "Shrawankar", |
| "suffix": "" |
| } |
| ], |
| "year": 2017, |
| "venue": "Journal of Information and Optimization Sciences", |
| "volume": "38", |
| "issue": "6", |
| "pages": "895--910", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Wazalwar, S. S. and Shrawankar, U. (2017). Inter- pretation of sign language into English using NLP techniques. Journal of Information and Optimiza- tion Sciences, 38(6):895-910.", |
| "links": null |
| }, |
| "BIBREF26": { |
| "ref_id": "b26", |
| "title": "The independent Human Rights Commission-Ministry of Labor and Social Affairs of Kurdistan Regional Government -Iraq-UNICEF-MEDS Organisation", |
| "authors": [ |
| { |
| "first": "Adwiya", |
| "middle": [], |
| "last": "Ghazi Dizayee", |
| "suffix": "" |
| } |
| ], |
| "year": 2000, |
| "venue": "", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Ghazi Dizayee, Adwiya. (2000). Sign Dictionary for Hearing-Impairment [In Kurdish]. The independent Human Rights Commission-Ministry of Labor and Social Affairs of Kurdistan Regional Government - Iraq-UNICEF-MEDS Organisation.", |
| "links": null |
| }, |
| "BIBREF27": { |
| "ref_id": "b27", |
| "title": "The independent Human Rights Commission-Ministry of Labor and Social Affairs of Kurdistan Regional Government -Iraq", |
| "authors": [ |
| { |
| "first": "Nazanin", |
| "middle": [], |
| "last": "Nashat Salim", |
| "suffix": "" |
| }, |
| { |
| "first": "Jamil", |
| "middle": [], |
| "last": "Hama Rashid", |
| "suffix": "" |
| }, |
| { |
| "first": "Salam", |
| "middle": [], |
| "last": "Haji Omar", |
| "suffix": "" |
| } |
| ], |
| "year": 2013, |
| "venue": "", |
| "volume": "", |
| "issue": "", |
| "pages": "", |
| "other_ids": {}, |
| "num": null, |
| "urls": [], |
| "raw_text": "Nashat Salim, Nazanin and Hama Rashid, Jamil and Haji Omar, Salam. (2013). Sign Dictionary for Hearing-Impairment [In Kurdish]. The independent Human Rights Commission-Ministry of Labor and Social Affairs of Kurdistan Regional Government - Iraq.", |
| "links": null |
| } |
| }, |
| "ref_entries": { |
| "FIGREF0": { |
| "uris": null, |
| "text": "Kurdish text to sign proposed architecture", |
| "type_str": "figure", |
| "num": null |
| }, |
| "FIGREF1": { |
| "uris": null, |
| "text": "HamNoSys Sample for Kurdish words", |
| "type_str": "figure", |
| "num": null |
| }, |
| "FIGREF2": { |
| "uris": null, |
| "text": "HamNoSys Sample for Kurdish letters", |
| "type_str": "figure", |
| "num": null |
| }, |
| "FIGREF3": { |
| "uris": null, |
| "text": "SiGML sample for letter \u202b\"\u0628\"\u202c SiGML sample for word \u202b\"\u0632\u0627\ufee7\ufb91\ufbda\"\u202c the meaning of a majority of the sentences as a whole.", |
| "type_str": "figure", |
| "num": null |
| } |
| } |
| } |
| } |