Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model bbda17c verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 7de3990 verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model 2aa6d6a verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model ae94546 verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model bab6921 verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 3568740 verified mciccone committed on Jun 10, 2025
Add llama_finetune_mmlu_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model ffc04cd verified mciccone committed on Jun 10, 2025