Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model e0c5d4f verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 88cd609 verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 0cb94b8 verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model d65b76c verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 97a1a90 verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model fde446f verified mciccone committed on Jun 10, 2025
Add llama_finetune_rte_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model b7df715 verified mciccone committed on Jun 10, 2025