Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model 16cb403 verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 8591aaa verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model 8a667e2 verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model b5022a0 verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model ea2019a verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model f84e196 verified mciccone committed on Jun 10, 2025
Add llama_finetune_qnli_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model f0f1bf6 verified mciccone committed on Jun 10, 2025