File size: 1,195 Bytes
062a37b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "lighteval>=0.6.0",
#     "torch>=2.0.0",
#     "transformers>=4.40.0",
#     "accelerate>=0.30.0",
# ]
# ///
"""Baseline: MMLU + GSM8K."""

import os, subprocess, glob

def main():
    """Run a lighteval baseline (five MMLU subtasks + GSM8K, 5-shot) and dump results.

    Side effects:
        - Propagates ``HF_TOKEN`` (if set) to the env var names the HF stack reads.
        - Sets ``PYTORCH_CUDA_ALLOC_CONF`` to reduce CUDA memory fragmentation.
        - Shells out to the ``lighteval`` CLI, writing results under /tmp/results.
        - Prints a preview of every result JSON found under /tmp/results.

    Raises:
        subprocess.CalledProcessError: if the lighteval run exits non-zero.
    """
    hf_token = os.getenv("HF_TOKEN")
    if hf_token:
        # Different HF tooling versions look for different variable names;
        # set both, without clobbering values the caller already exported.
        os.environ.setdefault("HUGGING_FACE_HUB_TOKEN", hf_token)
        os.environ.setdefault("HF_HUB_TOKEN", hf_token)
    # expandable_segments mitigates allocator fragmentation OOMs on long runs.
    os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True"

    model_args = "model_name=LiquidAI/LFM2.5-1.2B-Instruct,trust_remote_code=True,dtype=float16,max_length=2048"
    tasks = "leaderboard|mmlu:abstract_algebra|5,leaderboard|mmlu:anatomy|5,leaderboard|mmlu:astronomy|5,leaderboard|mmlu:business_ethics|5,leaderboard|mmlu:clinical_knowledge|5,leaderboard|gsm8k|5"

    cmd = ["lighteval", "accelerate", model_args, tasks, "--output-dir", "/tmp/results"]
    print(f"Running: {' '.join(cmd)}")
    # List-form argv (shell=False) avoids any shell quoting/injection issues;
    # check=True surfaces a failed eval as an exception instead of silence.
    subprocess.run(cmd, check=True)
    print("DONE")

    for f in glob.glob("/tmp/results/**/*.json", recursive=True):
        print(f"\n=== {f} ===")
        # Explicit UTF-8: result JSON may contain non-ASCII and must not
        # depend on the platform's default codec. Read only the preview
        # window instead of slurping the whole file and slicing.
        with open(f, encoding="utf-8") as fh:
            print(fh.read(10000))

if __name__ == "__main__":
    main()