Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
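For reference, a minimal sketch (not part of this commit) of how an upload like this might be performed with huggingface_hub's upload_large_folder API; the repo ID and local folder path below are hypothetical placeholders.

# sketch only: assumes huggingface_hub >= 0.24 and an authenticated session (huggingface-cli login)
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="your-org/your-repo",   # hypothetical target repository
    repo_type="model",
    folder_path="./scripts/yans",   # hypothetical local folder containing the files to upload
)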
- scripts/yans/lm-evaluation-harness/tests/testdata/anli_r1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/arithmetic_1dc-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_animate_subject_trans-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_2-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_drop_argument-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_existential_there_quantifiers_2-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_left_branch_island_simple_question-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_npi_present_2-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_case_1-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_case_2-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_domain_1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_domain_3-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_superlative_quantifiers_1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_tough_vs_raising_1-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_vs_that_with_gap-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cb-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cola-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/coqa-v1-greedy_until +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_age-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_sexual_orientation-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_french_sexual_orientation-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/cycle_letters-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/ethics_cm-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/gguf_test_44e268d15decc4d2d0f99e57e1476269826cd3b54262f7a0981f75ddd45b25d0.pkl +3 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/gpt3_test_8025023377febbd8c5f2b9f26705c394ff375d0cad7c89c10fd9b8e1eb66ff1c.pkl +3 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/headqa-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/headqa_es-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/headqa_es-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-abstract_algebra-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-electrical_engineering-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_biology-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_biology-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_computer_science-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_european_history-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_macroeconomics-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-moral_disputes-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-prehistory-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_medicine-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_de-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_fr-v0-res.json +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/lambada_standard_cloze-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/logiqa-v0-loglikelihood +1 -0
- scripts/yans/lm-evaluation-harness/tests/testdata/math_algebra-v0-greedy_until +1 -0
scripts/yans/lm-evaluation-harness/tests/testdata/anli_r1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"anli_r1": {"acc": 0.334, "acc_stderr": 0.014922019523732967}}, "versions": {"anli_r1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/arithmetic_1dc-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"arithmetic_1dc": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"arithmetic_1dc": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_animate_subject_trans-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_animate_subject_trans": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_animate_subject_trans": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_irregular_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_irregular_1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ddb24ddfaebe076b3aa7107937d71bf5f4503a78283bc889e39200368603681e
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_determiner_noun_agreement_irregular_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_determiner_noun_agreement_irregular_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_determiner_noun_agreement_irregular_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_drop_argument-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_drop_argument": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_drop_argument": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_existential_there_quantifiers_2-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_existential_there_quantifiers_2": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_existential_there_quantifiers_2": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_left_branch_island_simple_question-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+6cb36bbdae7754f8832f50872c3dd511ce12547e00fa0771deb747be3355eb85
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_npi_present_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+fdb688ac6259bb65d234ef0a36e9a9ee449f9608f633b12e1943b462aead8e17
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_case_1-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+49d2b8ce6667a6166fdc2a2e5dbe7ff07d9b8415e9f33482aef15956b3ebc24a
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_case_2-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+cd68adb65c891d672e22bf53c054b2083ab08bc1da43951732b409c942d14bc7
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_domain_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_principle_A_domain_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_principle_A_domain_1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_principle_A_domain_3-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_principle_A_domain_3": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_principle_A_domain_3": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_superlative_quantifiers_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_superlative_quantifiers_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_superlative_quantifiers_1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_tough_vs_raising_1-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_tough_vs_raising_1": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_tough_vs_raising_1": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_questions_subject_gap_long_distance-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+37483dfda688b62ad27161c9fc1e1e7710c5a6e6a7cd3474df119bcafd30e97f
scripts/yans/lm-evaluation-harness/tests/testdata/blimp_wh_vs_that_with_gap-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"blimp_wh_vs_that_with_gap": {"acc": 0.485, "acc_stderr": 0.0158121796418149}}, "versions": {"blimp_wh_vs_that_with_gap": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/cb-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ec3b1bbb9561e39c43c6f77a23b4060b15c606141c5346e3d0791b3e92aaa5d0
scripts/yans/lm-evaluation-harness/tests/testdata/cola-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cola": {"mcc": -0.04538802810223175, "mcc_stderr": 0.023100371589225246}}, "versions": {"cola": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/coqa-v1-greedy_until
ADDED
@@ -0,0 +1 @@
+57581470b921435d40da97872bb1cfda6ecf963ccc4b0240a3b04e3fea8c8e3a
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_age-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+de74d2ac7f926f2f486c045d84aae8f71711102f9d77b31f758fd148810d13d3
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_physical_appearance-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"crows_pairs_english_physical_appearance": {"likelihood_difference": 0.3221673223187262, "likelihood_difference_stderr": 0.026978346460100555, "pct_stereotype": 0.4027777777777778, "pct_stereotype_stderr": 0.05820650942569533}}, "versions": {"crows_pairs_english_physical_appearance": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_english_sexual_orientation-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+e754a309296b157677dfba6e6feef983d1ce38dd0169ae726265621a7b573163
scripts/yans/lm-evaluation-harness/tests/testdata/crows_pairs_french_sexual_orientation-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+2ce823fdb93d325aa8fb40db5d335b093b4b69792763532d940a752440ee3a76
scripts/yans/lm-evaluation-harness/tests/testdata/cycle_letters-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"cycle_letters": {"acc": 0.0, "acc_stderr": 0.0}}, "versions": {"cycle_letters": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/ethics_cm-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+92d136ebb2bd86cd036e61699ad9a1417dbb48651f0a3afa5045cf57cef5a3f6
scripts/yans/lm-evaluation-harness/tests/testdata/gguf_test_44e268d15decc4d2d0f99e57e1476269826cd3b54262f7a0981f75ddd45b25d0.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d672564bf8af93738531c26d02efda9ed844d1fdcdd619a06e37fce7ef6d8485
+size 153
scripts/yans/lm-evaluation-harness/tests/testdata/gpt3_test_8025023377febbd8c5f2b9f26705c394ff375d0cad7c89c10fd9b8e1eb66ff1c.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:590805560ee790d530c075ad76633eb2e9749440083e0bab63489ff920fdfd33
+size 70917
scripts/yans/lm-evaluation-harness/tests/testdata/headqa-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"headqa": {"acc": 0.23559445660102116, "acc_norm": 0.25018234865062, "acc_norm_stderr": 0.008272783230806014, "acc_stderr": 0.008105688874297972}}, "versions": {"headqa": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/headqa_es-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+767ca34d9714edd9fb030ddbcc35a64e5180d1e247b0cb557fbb22fdf971ad1f
scripts/yans/lm-evaluation-harness/tests/testdata/headqa_es-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"headqa_es": {"acc": 0.23559445660102116, "acc_norm": 0.25018234865062, "acc_norm_stderr": 0.008272783230806014, "acc_stderr": 0.008105688874297972}}, "versions": {"headqa_es": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-abstract_algebra-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-abstract_algebra": {"acc": 0.32, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235, "acc_stderr": 0.04688261722621504}}, "versions": {"hendrycksTest-abstract_algebra": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-electrical_engineering-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b9b5d8b8bb02696302ec6bc2a99bf987a5504d3bae0e529d2c8f263538c97518
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_biology-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d4dc051f37a49dc75c218741e87bc826fd44f31ee1309b55e0f33bd191c1bc78
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_biology-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_biology": {"acc": 0.23870967741935484, "acc_norm": 0.2709677419354839, "acc_norm_stderr": 0.025284416114900152, "acc_stderr": 0.024251071262208834}}, "versions": {"hendrycksTest-high_school_biology": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_computer_science-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_computer_science": {"acc": 0.2, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269, "acc_stderr": 0.04020151261036845}}, "versions": {"hendrycksTest-high_school_computer_science": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_european_history-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-high_school_european_history": {"acc": 0.23636363636363636, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953, "acc_stderr": 0.033175059300091805}}, "versions": {"hendrycksTest-high_school_european_history": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-high_school_macroeconomics-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+ce4faae2fb6628caa48f6fc74cbc848880db49e6ff51079392778a2322bcefef
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-moral_disputes-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+d6ef028022c02b69d1516973e08bebaa14d8debcf2589a2bb124823178202d20
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-nutrition-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+19e49d218f55ed5ec4bd1a6cd3f3388c6f620b81484e7abe8b298e5481c3044d
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-prehistory-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+6983c560a562749f4f702249a3a6ae51fa495acc0643a980bf2cf52c6c5d4b95
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_law-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-professional_law": {"acc": 0.2561929595827901, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279352, "acc_stderr": 0.011149173153110582}}, "versions": {"hendrycksTest-professional_law": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/hendrycksTest-professional_medicine-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"hendrycksTest-professional_medicine": {"acc": 0.23161764705882354, "acc_norm": 0.2536764705882353, "acc_norm_stderr": 0.02643132987078953, "acc_stderr": 0.025626533803777562}}, "versions": {"hendrycksTest-professional_medicine": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_de-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_mt_de": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_mt_de": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_mt_fr-v0-res.json
ADDED
@@ -0,0 +1 @@
+{"results": {"lambada_mt_fr": {"acc": 0.0, "acc_stderr": 0.0, "ppl": 1.6479047769869253, "ppl_stderr": 0.006497321146240192}}, "versions": {"lambada_mt_fr": 0}}
scripts/yans/lm-evaluation-harness/tests/testdata/lambada_standard_cloze-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+b604f00bc9f2a77ef41f8cfdb5a8509b3ae9266893b9e90abc665f5399ecba4e
scripts/yans/lm-evaluation-harness/tests/testdata/logiqa-v0-loglikelihood
ADDED
@@ -0,0 +1 @@
+12495c50454ba5e1ce0753bd18c09aaca516bebd27648d815e37b15229dbf198
scripts/yans/lm-evaluation-harness/tests/testdata/math_algebra-v0-greedy_until
ADDED
@@ -0,0 +1 @@
+f19182ce697a2c095d9e5b56ee6659dc38c93994b69ca75d7c3d3f5fd87572b4