"""caner_replicate.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1QgToSf5_B5l43oRCKwI2vGEB0YcRUtHe
"""
| |
|
| | import csv |
| | import os |
| |
|
| | import datasets |
| |
|
| |
|
_CITATION = """\
@article{article,
author = {Salah, Ramzi and Zakaria, Lailatul},
year = {2018},
month = {12},
pages = {},
title = {BUILDING THE CLASSICAL ARABIC NAMED ENTITY RECOGNITION CORPUS (CANERCORPUS)},
volume = {96},
journal = {Journal of Theoretical and Applied Information Technology}
}
"""

_DESCRIPTION = """\
Classical Arabic Named Entity Recognition corpus as a new corpus of tagged data that can be useful for handling the issues in recognition of Arabic named entities.
"""

# Data is mirrored from the original corpus repository:
# https://github.com/RamziSalah/Classical-Arabic-Named-Entity-Recognition-Corpus
_HOMEPAGE = "https://github.com/omarmohamed2011/caner_data_copy"

# No explicit license is published with the corpus.
_LICENSE = ""

# NOTE: this must be the *raw* content URL. The previous "/blob/main/..." form
# points at the GitHub HTML viewer page, so download_and_extract would fetch
# an HTML document instead of the zip archive.
_URL = "https://github.com/omarmohamed2011/caner_data_copy/raw/main/df_caner.zip"
| |
|
class Caner(datasets.GeneratorBasedBuilder):
    """Classical Arabic Named Entity Recognition corpus as a new corpus of tagged data that can be useful for handling the issues in recognition of Arabic named entities"""

    VERSION = datasets.Version("1.1.0")

    def _info(self):
        """Return the dataset metadata: one string token plus its NER class label."""
        features = datasets.Features(
            {
                "token": datasets.Value("string"),
                "ner_tag": datasets.ClassLabel(
                    names=[
                        "MalikIbnAnas",
                        "Sibawayh",
                        "IbnHisham",
                        "IbnSulaymanKufi",
                        "Bukhari",
                        "KhalilFarahidi",
                        "FathIbnKhaqan",
                        "IbnHamzaKisai",
                        "IbnTayfur",
                        "MuhammadBarjlani",
                        "ImamCaskari",
                        "AbdAllahIbnCabbas",
                        "IbnAyyubRazi",
                        "IbnWaddahQurtubi",
                        "HasanBasri",
                        "IbnAbiKhaythama",
                        "YahyaIbnHusayn",
                        "SufyanThawri",
                        "IbnQuraybAsmaci",
                        "IbnIsmacilKirmani",
                        "IbnCimranMawsili",
                        "Mubarrad",
                        "MuhammadShaybani",
                        "AbuZurcaDimashqi",
                        "IbnWahbQurashi",
                        "MacmarIbnMuthanna",
                        "YahyaIbnSalam",
                        "AbuHasanSacdi",
                        "IbnIbrahimBursi",
                        "IbnSirin",
                        "Baladhuri",
                        "CaliIbnAbiTalib",
                        "IbnZiyadFarra",
                        "AbuYusufYacqub",
                        "IbnHanbal",
                        "ZubayrIbnBakkar",
                        "AbuBakrBazzar",
                        "Fakihi",
                        "IbnMuzahimMinqari",
                        "AbyZurca",
                        "AkhfashAwsat",
                        "AhmadBarqi",
                        "IbnAhmadIbnHanbal",
                        "IbnCabdHakam",
                        "CabdRazzakSancani",
                        "AbuHatimSijistani",
                        "IbnSacd",
                        "IbnHammadKhuzaci",
                        "IbnCaliMarwazi",
                        "MujahidIbnJabr",
                        "Bahshal",
                        "IbnHasanSaffar",
                    ]
                ),
            }
        )

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            supervised_keys=None,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download and extract the archive; the corpus has a single train split.

        BUG FIX: the previous version discarded the extraction directory and
        hard-coded the Colab-only path '/content/df_caner.csv', so the script
        only worked inside one specific Colab session. We now resolve the CSV
        inside the directory that download_and_extract() actually produced.
        """
        data_dir = dl_manager.download_and_extract(_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    # The zip archive contains df_caner.csv at its top level.
                    "filepath": os.path.join(data_dir, "df_caner.csv"),
                    "split": "train",
                },
            )
        ]

    def _generate_examples(self, filepath, split):
        """Yield (id, example) pairs from the CSV.

        The file is comma-delimited with a header row (skipped); column 0 is
        the token and column 1 is its NER tag. `split` is unused but kept for
        signature compatibility with _split_generators' gen_kwargs.
        """
        with open(filepath, encoding="utf-8") as csv_file:
            reader = csv.reader(csv_file, delimiter=",")
            next(reader, None)  # skip the header row
            for id_, row in enumerate(reader):
                yield id_, {
                    "token": row[0],
                    "ner_tag": row[1],
                }