{
  "model": {
    "dtype": "auto",
    "max_length": "1024",
    "model": "boun-tabi-LMG/TURNA",
    "api": "hf",
    "architecture": "T5ForConditionalGeneration",
    "type": "pretrained",
    "num_parameters": "1b"
  },
  "results": [
    {
      "name": "xquad_tr",
      "task": "extractive_question_answering",
      "exact_match": 0.0,
      "f1": 0.0
    },
    {
      "name": "xlsum_tr",
      "task": "summarization",
      "rouge1": 0.1904384366601188,
      "rouge2": 0.060686113611140166,
      "rougeL": 0.1311090280660866
    },
    {
      "name": "xcopa_tr",
      "task": "multiple_choice",
      "acc": 0.558,
      "acc_norm": 0.558
    },
    {
      "name": "wmt-tr-en-prompt",
      "task": "machine_translation",
      "wer": 3.9036796738046218,
      "bleu": 0.0008286617236874524
    },
    {
      "name": "wiki_lingua_tr",
      "task": "summarization",
      "rouge1": 0.18435291474691423,
      "rouge2": 0.05584649726914134,
      "rougeL": 0.13446021077350823
    },
    {
      "name": "turkish_plu",
      "task": "multiple_choice",
      "acc": 0.40288,
      "acc_norm": 0.44608
    },
    {
      "name": "turkish_plu_goal_inference",
      "task": "multiple_choice",
      "acc": 0.37992831541218636,
      "acc_norm": 0.35722819593787336
    },
    {
      "name": "turkish_plu_next_event_prediction",
      "task": "multiple_choice",
      "acc": 0.383206106870229,
      "acc_norm": 0.4488549618320611
    },
    {
      "name": "turkish_plu_step_inference",
      "task": "multiple_choice",
      "acc": 0.272875816993464,
      "acc_norm": 0.4542483660130719
    },
    {
      "name": "turkish_plu_step_ordering",
      "task": "multiple_choice",
      "acc": 0.5122428991185113,
      "acc_norm": 0.5122428991185113
    },
    {
      "name": "check_worthiness",
      "task": "multiple_choice",
      "acc": 0.42230347349177333,
      "acc_norm": 0.620201096892139
    },
    {
      "name": "relevance_judgment",
      "task": "multiple_choice",
      "acc": 0.4904021937842779,
      "acc_norm": 0.5781535648994516
    },
    {
      "name": "tr-wikihow-summ",
      "task": "summarization",
      "rouge1": 0.20515501424269858,
      "rouge2": 0.05693981251975118,
      "rougeL": 0.1449313333992171
    },
    {
      "name": "tquad",
      "task": "extractive_question_answering",
      "exact_match": 0.0,
      "f1": 0.0003736920777279522
    },
    {
      "name": "sts_tr",
      "task": "text_classification",
      "acc": 0.14213197969543148,
      "acc_norm": 0.19506889050036258
    },
    {
      "name": "offenseval_tr",
      "task": "text_classification",
      "acc": 0.5099206349206349,
      "acc_norm": 0.7970521541950113
    },
    {
      "name": "mnli_tr",
      "task": "natural_language_inference",
      "acc": 0.3494,
      "acc_norm": 0.3464
    },
    {
      "name": "snli_tr",
      "task": "natural_language_inference",
      "acc": 0.3381,
      "acc_norm": 0.3343
    },
    {
      "name": "xnli_tr",
      "task": "natural_language_inference",
      "acc": 0.3341317365269461,
      "acc_norm": 0.33812375249501
    },
    {
      "name": "news_cat",
      "task": "text_classification",
      "acc": 0.328,
      "acc_norm": 0.208
    },
    {
      "name": "mlsum_tr",
      "task": "summarization",
      "rouge1": 0.20830277213555015,
      "rouge2": 0.11040542892341527,
      "rougeL": 0.16135585618616377
    },
    {
      "name": "mkqa_tr",
      "task": "extractive_question_answering",
      "exact_match": 0.0011837821840781297,
      "f1": 0.006720430107526878
    },
    {
      "name": "ironytr",
      "task": "text_classification",
      "acc": 0.48333333333333334,
      "acc_norm": 0.5033333333333333
    },
    {
      "name": "gecturk_generation",
      "task": "grammatical_error_correction",
      "exact_match": 0.0
    },
    {
      "name": "exams_tr",
      "task": "multiple_choice",
      "acc": 0.2366412213740458,
      "acc_norm": 0.2748091603053435
    },
    {
      "name": "belebele_tr",
      "task": "multiple_choice",
      "acc": 0.22555555555555556,
      "acc_norm": 0.22555555555555556
    },
    {
      "name": "turkce_atasozleri",
      "task": "multiple_choice",
      "acc": 0.19248554913294796,
      "acc_norm": 0.19248554913294796
    },
    {
      "name": "turkishmmlu",
      "task": "multiple_choice",
      "acc": 0.19333333333333333,
      "acc_norm": 0.19333333333333333
    },
    {
      "name": "bilmecebench",
      "task": "multiple_choice",
      "acc": 0.2420814479638009,
      "acc_norm": 0.2420814479638009
    },
    {
      "name": "circumflex_tr",
      "task": "multiple_choice",
      "acc": 0.5142857142857142,
      "acc_norm": 0.5142857142857142
    },
    {
      "name": "wmt-en-tr-prompt",
      "task": "machine_translation",
      "wer": 2.8753770722816165,
      "bleu": 0.002738382660305397
    }
  ]
}