Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model 5ad3260 verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 23e71c6 verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 9fd911f verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 3bbb53c verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model 21259e6 verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 4735bd6 verified mciccone committed on Jun 10, 2025
Add llama_finetune_swa_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 62aa2f0 verified mciccone committed on Jun 10, 2025