zaaabik committed
Commit 39b341d · verified · 1 Parent(s): 8f646ae

Upload folder using huggingface_hub

results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_CoLa/seed_0/best_run.pickle CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:271412dc66d9df0a2c00ebd0e81124843393809a7976106989ced6f54be0779f
- size 97450
+ oid sha256:b8404da326522a6bc8d7fdb81f05c8be37211f168ea376adc2434a0422cef150
+ size 97458
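
Each tracked .pickle above is a Git LFS pointer: the repository stores only the version/oid/size triplet, while the binary payload lives in LFS storage. A minimal sketch of pulling the real file and unpickling it, assuming a hypothetical repo id (the commit page does not name the repository):

import pickle

from huggingface_hub import hf_hub_download

# hf_hub_download resolves the LFS pointer and returns a local cache path.
# "zaaabik/<repo-name>" is a placeholder, not taken from this commit; the
# repo_type (model vs. dataset) is also not shown here and defaults to "model".
local_path = hf_hub_download(
    repo_id="zaaabik/<repo-name>",
    filename=(
        "results_on_embeddings_prefix/grid_entropy_response_finetune/"
        "model_qwen_train/dataset_CoLa/seed_0/best_run.pickle"
    ),
)

with open(local_path, "rb") as f:
    best_run = pickle.load(f)  # the object's structure is not shown in this diff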
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_CoLa/seed_0/best_run_metric_df.csv CHANGED
@@ -1,2 +1,2 @@
  ,model,adapter,dataset,train_on_dataset,seed,grid,lam,reg_alpha,l2sp_alpha,load_weights,lr,head_type,trial_number,valid-acc,valid-base-max-prob-roc-auc,valid-fine-tune-max-prob-roc-auc,test-acc,test-base-max-prob-roc-auc,test-fine-tune-max-prob-roc-auc
- 0,Qwen/Qwen2.5-7B,LoraConfig,cola,train,0,entropy_response_finetune,100.0,0.01,0.01,cls_head,0.0001,entropy,3,0.8573933243751526,0.7926277001128655,0.811771542235185,0.8418024778366089,0.8242976461655276,0.8517187823565956
+ 0,Qwen/Qwen2.5-7B,PrefixTuningConfig,cola,train,0,entropy_response_finetune,100.0,0.01,1.0,cls_head,0.0001,entropy,5,0.7101110219955444,0.6016485795831674,0.6125165936545864,0.693192720413208,0.5704702627939142,0.5691152316735824
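
The best_run_metric_df.csv files are single-row tables under the header shown above. A minimal pandas sketch for reading one and checking the test-set gain of the fine-tuned max-prob detector over the base model, assuming the file has been checked out locally:

import pandas as pd

# Read the one-row metric table for the CoLA best run; column 0 is an unnamed index.
df = pd.read_csv(
    "results_on_embeddings_prefix/grid_entropy_response_finetune/"
    "model_qwen_train/dataset_CoLa/seed_0/best_run_metric_df.csv",
    index_col=0,
)
row = df.iloc[0]

# Test-set ROC-AUC gain of the fine-tuned model over the base model.
print(row["test-fine-tune-max-prob-roc-auc"] - row["test-base-max-prob-roc-auc"])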
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_SST5/seed_0/all_runs.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec0a6ccf9debf1c16781445c4b9106080d00478b0559469336db7c7b7b9711c8
+ size 5
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_SST5/seed_0/best_run.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4dbe8de8a0503ce84deda64e51128a282357360cdf0e26416e88af66acf089a1
+ size 252015
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_SST5/seed_0/best_run_metric_df.csv ADDED
@@ -0,0 +1,2 @@
+ ,model,adapter,dataset,train_on_dataset,seed,grid,lam,reg_alpha,l2sp_alpha,load_weights,lr,head_type,trial_number,valid-acc,valid-base-max-prob-roc-auc,valid-fine-tune-max-prob-roc-auc,test-acc,test-base-max-prob-roc-auc,test-fine-tune-max-prob-roc-auc
+ 0,Qwen/Qwen2.5-7B,PrefixTuningConfig,SST5,train,0,entropy_response_finetune,100.0,1.0,0.1,cls_head,0.0001,entropy,22,0.35785648226737976,0.5636133228986424,0.597049806503493,0.3601810038089752,0.5934747997412806,0.6301766079335859
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_ToxigenDataset/seed_0/all_runs.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec0a6ccf9debf1c16781445c4b9106080d00478b0559469336db7c7b7b9711c8
+ size 5
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_ToxigenDataset/seed_0/best_run.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be4f5bfc5ce247cca5eada411e96a26e42c05ddfb956c83117844efde51ee1d0
+ size 93749
results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train/dataset_ToxigenDataset/seed_0/best_run_metric_df.csv ADDED
@@ -0,0 +1,2 @@
+ ,model,adapter,dataset,train_on_dataset,seed,grid,lam,reg_alpha,l2sp_alpha,load_weights,lr,head_type,trial_number,valid-acc,valid-base-max-prob-roc-auc,valid-fine-tune-max-prob-roc-auc,test-acc,test-base-max-prob-roc-auc,test-fine-tune-max-prob-roc-auc
+ 0,Qwen/Qwen2.5-7B,PrefixTuningConfig,toxigen,train,0,entropy_response_finetune,100.0,1.0,0.01,cls_head,1e-05,entropy,24,0.6729910969734192,0.6597798832911663,0.6717776306182397,0.6372340321540833,0.6193068604076197,0.6312279997454213
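
With one best_run_metric_df.csv per dataset (CoLa, SST5, ToxigenDataset), the rows can be concatenated for a side-by-side view. A sketch under the same local-checkout assumption:

from pathlib import Path

import pandas as pd

# Gather every per-dataset best-run table under the model_qwen_train grid directory.
root = Path(
    "results_on_embeddings_prefix/grid_entropy_response_finetune/model_qwen_train"
)
paths = sorted(root.glob("dataset_*/seed_0/best_run_metric_df.csv"))
summary = pd.concat(
    (pd.read_csv(p, index_col=0) for p in paths), ignore_index=True
)

print(summary[["dataset", "adapter", "test-acc",
               "test-base-max-prob-roc-auc", "test-fine-tune-max-prob-roc-auc"]])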