| Model_name | Train_size | Test_size | lora (target modules) | r (LoRA rank) | Trainable_parameters | Memory Allocation | accuracy | f1_macro | f1_weighted | precision | recall |
|---|---|---|---|---|---|---|---|---|---|---|---|
| FacebookAI/roberta-large | 50,775 | 12,652 | dense, value, out_proj, key, query | 128 | 57,686,029 | 1.220957 | 89.89883 | 89.534963 | 89.911048 | 89.576317 | 89.508516 |
| FacebookAI/xlm-roberta-large | 50,775 | 12,652 | dense, value, out_proj, key, query | 80 | 36,452,365 | 1.102002 | 89.685425 | 89.260193 | 89.70282 | 89.401579 | 89.144042 |
| Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | down_proj, gate_proj, o_proj, score, up_proj | 96 | 41,300,992 | 4.138467 | 88.966171 | 88.356148 | 88.985935 | 88.46872 | 88.281362 |
| RUCAIBox/mvp | 50,775 | 12,652 | fc1, dense, k_proj, out_proj, q_proj, fc2, v_proj | 128 | 69,600,896 | 1.578633 | 89.685425 | 89.191071 | 89.700643 | 89.293141 | 89.107709 |
| answerdotai/ModernBERT-large | 50,775 | 12,652 | classifier, Wqkv, dense, Wo, Wi | 112 | 50,619,405 | 1.377988 | 89.574771 | 88.993892 | 89.587722 | 89.168306 | 88.847172 |
| facebook/bart-large | 50,775 | 12,652 | v_proj, fc2, out_proj, fc1, dense, q_proj, k_proj | 128 | 69,600,896 | 1.927803 | 89.693329 | 89.205372 | 89.704545 | 89.288181 | 89.150289 |
| facebook/opt-125m | 50,775 | 12,652 | v_proj, fc2, out_proj, score, fc1, q_proj, k_proj | 128 | 21,243,648 | 0.615615 | 88.318052 | 87.702751 | 88.341254 | 87.74554 | 87.682287 |
| facebook/opt-350m | 50,775 | 12,652 | v_proj, fc2, out_proj, score, project_in, fc1, q_proj, k_proj, project_out | 128 | 57,022,976 | 1.611289 | 89.037306 | 88.618297 | 89.044621 | 88.733269 | 88.516525 |
| google-bert/bert-large-uncased | 50,775 | 12,652 | classifier, dense | 128 | 38,024,205 | 1.160762 | 88.721151 | 88.122461 | 88.749407 | 88.242394 | 88.043566 |
| google-t5/t5-large | 50,775 | 12,652 | wi, out_proj, dense, o, wo, v, q, k | 128 | 138,806,912 | 3.944111 | 89.037306 | 88.276916 | 89.050665 | 88.603012 | 88.031521 |
| Qwen/Qwen2-1.5B | 50,775 | 12,652 | down_proj, gate_proj, k_proj, o_proj, q_proj, up_proj, v_proj | 128 | 147,738,112 | 2.874775 | 0.866029 | 0.853751 | 0.870671 | 0.852995 | 0.871274 |
| Qwen/Qwen3-1.7B | 50,775 | 12,652 | down_proj, gate_proj, k_proj, o_proj, q_proj, up_proj, v_proj | 128 | 139,487,232 | 2.846445 | 0.813705 | 0.803528 | 0.818697 | 0.805926 | 0.827676 |
| facebook/opt-2.7b | 50,775 | 12,652 | fc1, fc2, k_proj, out_proj, q_proj, v_proj | 64 | 94,405,120 | 2.932852 | 0.80343 | 0.792611 | 0.817475 | 0.813736 | 0.818266 |
| tiiuae/Falcon3-3B-Base | 50,775 | 12,652 | down_proj, gate_proj, o_proj, up_proj | 128 | 121,150,464 | 2.625752 | 0.842792 | 0.829756 | 0.848151 | 0.830768 | 0.849633 |
| Qwen/Qwen2.5-3B-Instruct | 50,775 | 12,652 | down_proj, gate_proj, o_proj, up_proj | 128 | 199,387,136 | 3.926855 | 0.865476 | 0.85332 | 0.87013 | 0.852328 | 0.871832 |
| Qwen/Qwen3-Embedding-4B | 50,775 | 12,652 | down_proj, gate_proj, o_proj, up_proj | 128 | 200,573,440 | 4.614141 | 0.847297 | 0.835933 | 0.853349 | 0.838054 | 0.855377 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | down_proj, gate_proj, k_proj, o_proj, q_proj, up_proj, v_proj | 128 | 264,274,432 | 5.630898 | 0.848482 | 0.836983 | 0.853935 | 0.837632 | 0.856989 |
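The five metric columns (`accuracy`, `f1_macro`, `f1_weighted`, `precision`, `recall`) correspond to standard classification metrics; note that the first ten rows report them as percentages while the remaining rows use fractions in [0, 1]. A minimal evaluation sketch with scikit-learn, assuming macro averaging for precision and recall (the averaging mode is not stated in the table):

```python
# Minimal sketch of how the metric columns could be computed.
# y_true / y_pred are placeholders standing in for the 12,652 test labels
# and the model's predictions.
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score

y_true = [0, 1, 2, 1, 0, 2]  # placeholder labels
y_pred = [0, 2, 2, 1, 0, 1]  # placeholder predictions

metrics = {
    "accuracy":    accuracy_score(y_true, y_pred),
    "f1_macro":    f1_score(y_true, y_pred, average="macro"),
    "f1_weighted": f1_score(y_true, y_pred, average="weighted"),
    "precision":   precision_score(y_true, y_pred, average="macro"),  # assumption: macro
    "recall":      recall_score(y_true, y_pred, average="macro"),     # assumption: macro
}
print(metrics)
```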