| Model_name (string) | Train_size (int64) | Test_size (int64) | arg (dict) | lora (list) | Parameters (int64) | Trainable_parameters (int64) | r (int64) | Memory Allocation (string) | Training Time (string) | accuracy (float64) | f1_macro (float64) | f1_weighted (float64) | precision (float64) | recall (float64) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 4,034,384,896 | 12,567,040 | 8 | 3490.84 | 6764.89 | 0.894404 | 0.889038 | 0.89465 | 0.890801 | 0.887697 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 4,046,918,656 | 25,100,800 | 16 | 3566.11 | 7598.53 | 0.899542 | 0.895435 | 0.899754 | 0.897327 | 0.893938 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 4,071,986,176 | 50,168,320 | 32 | 3730.69 | 7642.08 | 0.900174 | 0.895563 | 0.900369 | 0.897199 | 0.894226 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 4,122,121,216 | 100,303,360 | 64 | 4335.05 | 7786.72 | 0.904126 | 0.899359 | 0.904327 | 0.900997 | 0.898052 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 4,222,391,296 | 200,573,440 | 128 | 5828.4 | 8047.66 | 0.906023 | 0.901803 | 0.906234 | 0.903212 | 0.900712 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["k_proj", "q_proj", "v_proj"] | 4,025,832,448 | 4,014,592 | 8 | 2977.97 | 7578.4 | 0.868716 | 0.860003 | 0.8687 | 0.862192 | 0.858141 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["k_proj", "q_proj", "v_proj"] | 4,029,813,760 | 7,995,904 | 16 | 931.62 | 7585.71 | 0.881363 | 0.874203 | 0.88159 | 0.875028 | 0.873631 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["k_proj", "q_proj", "v_proj"] | 4,037,776,384 | 15,958,528 | 32 | 3109.14 | 7606.4 | 0.889425 | 0.883143 | 0.889576 | 0.884357 | 0.882212 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["k_proj", "q_proj", "v_proj"] | 4,053,701,632 | 31,883,776 | 64 | 3335.13 | 7641.73 | 0.894325 | 0.888758 | 0.894486 | 0.889844 | 0.887984 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["k_proj", "q_proj", "v_proj"] | 4,085,552,128 | 63,734,272 | 128 | 3881.7 | 7281.75 | 0.90049 | 0.895578 | 0.90069 | 0.896763 | 0.894777 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 4,038,366,208 | 16,548,352 | 8 | 4091.09 | 7798.63 | 0.894562 | 0.888925 | 0.894758 | 0.890242 | 0.887961 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 4,054,881,280 | 33,063,424 | 16 | 4180.43 | 7830.36 | 0.89804 | 0.892904 | 0.898199 | 0.894294 | 0.891799 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 4,087,911,424 | 66,093,568 | 32 | 4402.18 | 7865.51 | 0.902229 | 0.897552 | 0.902413 | 0.898839 | 0.896565 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 4,153,971,712 | 132,153,856 | 64 | 4834.18 | 7612.94 | 0.904521 | 0.900059 | 0.904643 | 0.901197 | 0.89925 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | {"adafactor": false, "adam_beta1": 0.9, "adam_beta2": 0.999, "adam_epsilon": 1e-8, "bf16": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 4, "half_precision_backend": "auto", "label_smoothing_factor": 0, "learning_rate": 0.00005, "lr_scheduler_type": "linear", "max_grad_norm": 1, "max_steps": -1, "n_gpu": 2, "num_train_epochs": 1, "optim": "adamw_8bit", "optim_args": "Not have", "per_device_eval_batch_size": 8, "per_device_train_batch_size": 8, "warmup_ratio": 0, "warmup_steps": 5, "weight_decay": 0.01} | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 4,286,092,288 | 264,274,432 | 128 | 6805.09 | 7941.72 | 0.906339 | 0.902009 | 0.906516 | 0.903233 | 0.901092 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.