| Model_name | Train_size | Test_size | lora | r | Trainable_parameters | Memory Allocation | accuracy | f1_macro | f1_weighted | precision | recall |
|---|---|---|---|---|---|---|---|---|---|---|---|
| FacebookAI/roberta-large | 50,775 | 12,652 | ["out_proj", "value", "dense", "key", "query"] | 12 | 12,918,970 | 1.207656 | 90.246601 | 89.802879 | 90.260977 | 89.93768 | 89.700889 |
| FacebookAI/xlm-roberta-large | 50,775 | 12,652 | ["dense", "out_proj"] | 12 | 6,504,058 | 0.825117 | 89.977869 | 89.593698 | 89.995384 | 89.727738 | 89.497052 |
| Qwen/Qwen3-Reranker-0.6B | 50,775 | 12,652 | ["down_proj", "gate_proj", "o_proj", "score", "up_proj"] | 12 | 9,978,802 | 1.703594 | 90.0411 | 89.583316 | 90.054603 | 89.763457 | 89.43125 |
| RUCAIBox/mvp | 50,775 | 12,652 | ["fc2", "q_proj", "out_proj", "fc1", "dense", "v_proj", "k_proj"] | 12 | 17,195,578 | 1.770498 | 90.460006 | 90.053034 | 90.474028 | 90.179157 | 89.952785 |
| answerdotai/ModernBERT-large | 50,775 | 12,652 | ["Wi", "Wo", "classifier", "dense"] | 12 | 7,573,210 | 1.188389 | 90.460006 | 90.070314 | 90.474233 | 90.214053 | 89.94897 |
| facebook/bart-large | 50,775 | 12,652 | ["fc2", "q_proj", "out_proj", "fc1", "dense", "v_proj", "k_proj"] | 12 | 17,195,578 | 1.690195 | 90.436295 | 90.019956 | 90.450537 | 90.173706 | 89.895379 |
| facebook/opt-125m | 50,775 | 12,652 | ["fc2", "q_proj", "out_proj", "fc1", "v_proj", "k_proj", "score"] | 12 | 3,594,290 | 0.43123 | 89.029402 | 88.547062 | 89.039785 | 88.658871 | 88.453779 |
| facebook/opt-350m | 50,775 | 12,652 | ["fc1", "fc2", "out_proj", "project_in", "project_out", "score"] | 12 | 6,460,026 | 0.905674 | 89.464116 | 89.023809 | 89.476387 | 89.192131 | 88.899953 |
| google-bert/bert-large-uncased | 50,775 | 12,652 | ["classifier", "dense"] | 12 | 6,504,058 | 0.837207 | 89.61429 | 89.180331 | 89.623731 | 89.245118 | 89.128224 |
| google-t5/t5-large | 50,775 | 12,652 | ["wo", "out_proj", "q", "wi", "dense", "k", "v", "o"] | 12 | 34,302,010 | 3.54624 | 90.768258 | 90.266154 | 90.787784 | 90.340215 | 90.213506 |
| Qwen/Qwen2-1.5B | 50,775 | 12,652 | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 32 | 8,429,568 | 1.617012 | 0.903889 | 0.899308 | 0.904411 | 0.896445 | 0.90323 |
| Qwen/Qwen3-1.7B | 50,775 | 12,652 | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 16 | 29,589,504 | 2.122236 | 0.899937 | 0.894225 | 0.900583 | 0.891233 | 0.898903 |
| facebook/opt-2.7b | 50,775 | 12,652 | ["fc1", "fc2", "out_proj"] | 32 | 19,906,560 | 2.394766 | 0.899542 | 0.893458 | 0.900166 | 0.890395 | 0.898157 |
| tiiuae/Falcon3-3B-Base | 50,775 | 12,652 | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 16 | 52,174,848 | 1.645117 | 0.899146 | 0.893445 | 0.899648 | 0.890745 | 0.897115 |
| Qwen/Qwen2.5-3B-Instruct | 50,775 | 12,652 | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 64 | 12,257,280 | 3.259326 | 0.905469 | 0.90008 | 0.905995 | 0.897099 | 0.90432 |
| Alibaba-NLP/E2Rank-4B | 50,775 | 12,652 | ["down_proj", "gate_proj", "k_proj", "o_proj", "q_proj", "up_proj", "v_proj"] | 64 | 20,146,176 | 4.926826 | 0.904916 | 0.899534 | 0.905499 | 0.896799 | 0.903723 |
| Qwen/Qwen3-Embedding-4B | 50,775 | 12,652 | ["down_proj", "gate_proj", "o_proj", "up_proj"] | 32 | 29,859,840 | 4.167988 | 0.90792 | 0.902632 | 0.908473 | 0.899744 | 0.906993 |
README.md exists but its content is empty.

- Downloads last month: 22