Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr5e-05_data_size1000_max_steps=500_seed=123 LoRA model 3c3996a verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr5e-06_data_size1000_max_steps=1000_seed=123 LoRA model bc7b3da verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr1e-05_data_size1000_max_steps=1000_seed=123 LoRA model b49da9f verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=100_seed=123 LoRA model 47265dc verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=100_seed=123 LoRA model 2c07a37 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=500_seed=123 LoRA model ba488a5 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr1e-05_data_size1000_max_steps=500_seed=123 LoRA model 2069543 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0003_data_size1000_max_steps=500_seed=123 LoRA model cc351c3 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr5e-06_data_size1000_max_steps=500_seed=123 LoRA model 33b7ba3 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0001_data_size1000_max_steps=100_seed=123 LoRA model 2901976 verified mciccone committed on Jun 10, 2025
Add llama_finetune_medmcqa_r16_alpha=32_dropout=0.05_lr0.0002_data_size1000_max_steps=500_seed=123 LoRA model 9c1d453 verified mciccone committed on Jun 10, 2025