Model_name | Train_size | Test_size | arg | lora | Parameters | Trainable_parameters | r | Memory Allocation | Training Time | Performance |
|---|---|---|---|---|---|---|---|---|---|---|
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 125,698,586 | 1,042,957 | 4 | 1379.61 | 516.83 | {
"accuracy": 0.8465855200758773,
"f1_macro": 0.8340490920495849,
"f1_weighted": 0.8465504941446558,
"precision": 0.8402862114476798,
"recall": 0.8301870667970347
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 126,140,954 | 1,485,325 | 8 | 1375.29 | 539.19 | {
"accuracy": 0.8593898197913373,
"f1_macro": 0.8494724159910976,
"f1_weighted": 0.8595404905204881,
"precision": 0.8537412420731172,
"recall": 0.8464116130684702
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 127,025,690 | 2,370,061 | 16 | 1379.01 | 524.26 | {
"accuracy": 0.8689535251343661,
"f1_macro": 0.860226577406406,
"f1_weighted": 0.8691191048708272,
"precision": 0.8630581563058186,
"recall": 0.8579840198270493
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 128,795,162 | 4,139,533 | 32 | 1405.63 | 522.2 | {
"accuracy": 0.873300663926652,
"f1_macro": 0.8652518906400283,
"f1_weighted": 0.8734504853470856,
"precision": 0.8671457037466545,
"recall": 0.8637183716416306
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 132,334,106 | 7,678,477 | 64 | 1470.14 | 538.22 | {
"accuracy": 0.879544736010117,
"f1_macro": 0.8717421988175561,
"f1_weighted": 0.8796959266260036,
"precision": 0.8729376208017359,
"recall": 0.8708070546286392
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 139,411,994 | 14,756,365 | 128 | 1474.88 | 564.83 | {
"accuracy": 0.8837337970281378,
"f1_macro": 0.8764199863238034,
"f1_weighted": 0.8838460171049024,
"precision": 0.8775010379877884,
"recall": 0.8755335295237698
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 408,644,673 | 1,290,292 | 4 | 3792.74 | 1765.12 | {
"accuracy": 0.8254030983243756,
"f1_macro": 0.7942233737452272,
"f1_weighted": 0.8210719850322569,
"precision": 0.8155352712384848,
"recall": 0.7892248972999769
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 357,615,642 | 2,242,573 | 4 | 3299.3 | 1456.24 | {
"accuracy": 0.8762251027505533,
"f1_macro": 0.8692030515942923,
"f1_weighted": 0.8765679781573217,
"precision": 0.8711681710386169,
"recall": 0.8678851272646673
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 153,567,770 | 28,912,141 | 256 | 1649.39 | 622.11 | {
"accuracy": 0.8876857413847613,
"f1_macro": 0.8811070927831653,
"f1_weighted": 0.8877647364230433,
"precision": 0.8822983580334766,
"recall": 0.8800895685744806
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 562,146,330 | 2,242,573 | 4 | 4935.54 | 2021.03 | {
"accuracy": 0.8725893139424596,
"f1_macro": 0.8648132854936866,
"f1_weighted": 0.8727950240317498,
"precision": 0.867549548960421,
"recall": 0.8626542247550917
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 109,950,746 | 458,509 | 4 | 1248.45 | 503.85 | {
"accuracy": 0.6804457793234271,
"f1_macro": 0.5831976548071854,
"f1_weighted": 0.6489669753037608,
"precision": 0.6552885170242525,
"recall": 0.6076809656621555
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 577,734,682 | 1,799,309 | 4 | 5007.36 | 2625.72 | {
"accuracy": 0.8695858362314258,
"f1_macro": 0.8589912882110775,
"f1_weighted": 0.8695841347189455,
"precision": 0.8637437350730561,
"recall": 0.855559709905021
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 110,399,258 | 907,021 | 8 | 1244.82 | 501.33 | {
"accuracy": 0.7468384445147013,
"f1_macro": 0.6780446340268369,
"f1_weighted": 0.7257716525873043,
"precision": 0.698242661197289,
"recall": 0.6866301155276309
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 181,879,322 | 57,223,693 | 512 | 1948.99 | 745.42 | {
"accuracy": 0.89084729687006,
"f1_macro": 0.884702784129353,
"f1_weighted": 0.8909290679599178,
"precision": 0.8857158320594325,
"recall": 0.883844733737056
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 358,795,290 | 3,422,221 | 8 | 3308.5 | 1466.79 | {
"accuracy": 0.8821530192854885,
"f1_macro": 0.8763401875319281,
"f1_weighted": 0.8825445854181906,
"precision": 0.877769674405757,
"recall": 0.8753882277725543
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 111,296,282 | 1,804,045 | 16 | 1248.79 | 479.4 | {
"accuracy": 0.7977394878280114,
"f1_macro": 0.7523181078518651,
"f1_weighted": 0.7862311782799655,
"precision": 0.7982886603247584,
"recall": 0.7532144822533675
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 409,934,965 | 2,580,584 | 8 | 3804.75 | 1765.81 | {
"accuracy": 0.8494309200126462,
"f1_macro": 0.8324264851760286,
"f1_weighted": 0.8484212760977864,
"precision": 0.8410424107713527,
"recall": 0.8278077509740616
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 113,090,330 | 3,598,093 | 32 | 1274.29 | 503.21 | {
"accuracy": 0.8400252924438824,
"f1_macro": 0.8241649565993185,
"f1_weighted": 0.838801434313264,
"precision": 0.837904148263822,
"recall": 0.8168347742360932
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 563,325,978 | 3,422,221 | 8 | 4944.75 | 2016.25 | {
"accuracy": 0.8814416693012962,
"f1_macro": 0.8750724802105493,
"f1_weighted": 0.8815682095432281,
"precision": 0.8771349816062244,
"recall": 0.8733434004724216
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 116,678,426 | 7,186,189 | 64 | 1341.59 | 523.13 | {
"accuracy": 0.856307303193171,
"f1_macro": 0.8445821685599258,
"f1_weighted": 0.8558074261383034,
"precision": 0.8532108533533793,
"recall": 0.8394980941298701
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 361,154,586 | 5,781,517 | 16 | 3335.47 | 1467.59 | {
"accuracy": 0.8860259247549794,
"f1_macro": 0.8805908113119117,
"f1_weighted": 0.8862769058797022,
"precision": 0.8815056148370751,
"recall": 0.8799666252638331
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 123,854,618 | 14,362,381 | 128 | 1348.89 | 550.71 | {
"accuracy": 0.8682421751501739,
"f1_macro": 0.8607836525623542,
"f1_weighted": 0.8681785403418177,
"precision": 0.8650231484647595,
"recall": 0.8575399385906514
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 412,515,549 | 5,161,168 | 16 | 3836.41 | 1775.46 | {
"accuracy": 0.8675308251659817,
"f1_macro": 0.8569831684700908,
"f1_weighted": 0.8675100017298956,
"precision": 0.8611393290578584,
"recall": 0.8542100346505586
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 579,519,002 | 3,583,629 | 8 | 5026.7 | 2631.96 | {
"accuracy": 0.8827853303825483,
"f1_macro": 0.8758300378165708,
"f1_weighted": 0.8829383871624759,
"precision": 0.87862067881592,
"recall": 0.8735385090229185
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 138,207,002 | 28,714,765 | 256 | 1518.54 | 612.14 | {
"accuracy": 0.8768574138476131,
"f1_macro": 0.8711695243461549,
"f1_weighted": 0.8768884317753117,
"precision": 0.8737841851778931,
"recall": 0.8690053543037654
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 365,873,178 | 10,500,109 | 32 | 3414.77 | 1477.69 | {
"accuracy": 0.8906892190957951,
"f1_macro": 0.8860021264947283,
"f1_weighted": 0.8909334482337231,
"precision": 0.8868374971079175,
"recall": 0.8854509207788522
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 125,477,402 | 821,773 | 4 | 1156.22 | 550.26 | {
"accuracy": 0.8340183370218147,
"f1_macro": 0.8198675525987951,
"f1_weighted": 0.8337764478041396,
"precision": 0.8259594334694711,
"recall": 0.8159187392243591
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 166,911,770 | 57,419,533 | 512 | 1841.63 | 733.87 | {
"accuracy": 0.882232058172621,
"f1_macro": 0.8773384152398953,
"f1_weighted": 0.8823046399232333,
"precision": 0.8795561471893543,
"recall": 0.8755108966229024
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 336,356,378 | 1,201,165 | 4 | 729.05 | 555.38 | {
"accuracy": 0.815523237432817,
"f1_macro": 0.7794404234917954,
"f1_weighted": 0.8089604529608558,
"precision": 0.8165454155559532,
"recall": 0.7735023676594606
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 125,698,586 | 1,042,957 | 8 | 1150.03 | 536.72 | {
"accuracy": 0.8510907366424281,
"f1_macro": 0.8400420874226526,
"f1_weighted": 0.8511327945816213,
"precision": 0.8457216333479073,
"recall": 0.8361739028568304
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 417,676,717 | 10,322,336 | 32 | 3916.76 | 1790.55 | {
"accuracy": 0.8758299083148909,
"f1_macro": 0.8668049587329789,
"f1_weighted": 0.8758603217556982,
"precision": 0.8697313514600873,
"recall": 0.8646167954835591
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 337,544,218 | 2,389,005 | 8 | 719.58 | 554.83 | {
"accuracy": 0.8619190641795763,
"f1_macro": 0.8523901695902638,
"f1_weighted": 0.862036251000165,
"precision": 0.8565319337706493,
"recall": 0.8494140859579764
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 126,140,954 | 1,485,325 | 16 | 1149.65 | 536.95 | {
"accuracy": 0.8595478975656022,
"f1_macro": 0.8495386110208438,
"f1_weighted": 0.8596940655110615,
"precision": 0.8539706656018181,
"recall": 0.8463705209445991
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 375,310,362 | 19,937,293 | 64 | 3575.49 | 1522.37 | {
"accuracy": 0.8940088523553589,
"f1_macro": 0.8896207971222672,
"f1_weighted": 0.8941604025617136,
"precision": 0.8906132689531988,
"recall": 0.8888308789628273
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 339,919,898 | 4,764,685 | 16 | 734.77 | 557.47 | {
"accuracy": 0.8700600695542207,
"f1_macro": 0.8629267469057272,
"f1_weighted": 0.8703546878001558,
"precision": 0.8662414806607376,
"recall": 0.8604568138886367
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 109,944,602 | 452,365 | 8 | 1010.89 | 495.67 | {
"accuracy": 0.5833069870376225,
"f1_macro": 0.47480777422523524,
"f1_weighted": 0.5335707257009746,
"precision": 0.503493398959869,
"recall": 0.5156260517847151
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 127,025,690 | 2,370,061 | 32 | 1175.96 | 538.72 | {
"accuracy": 0.8669775529560544,
"f1_macro": 0.8582782106285267,
"f1_weighted": 0.8671392295134541,
"precision": 0.8613026321631628,
"recall": 0.8559757886864787
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 583,087,642 | 7,152,269 | 16 | 5074.65 | 2648.51 | {
"accuracy": 0.89084729687006,
"f1_macro": 0.8852825636698043,
"f1_weighted": 0.8910752417418889,
"precision": 0.887225855993687,
"recall": 0.8836901840064006
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 570,403,866 | 10,500,109 | 32 | 5042.86 | 2045.0 | {
"accuracy": 0.8910844135314575,
"f1_macro": 0.8854909910992065,
"f1_weighted": 0.8912291024968251,
"precision": 0.887047946004157,
"recall": 0.8841939914729813
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 110,386,970 | 894,733 | 16 | 1018.89 | 496.81 | {
"accuracy": 0.6585520075877331,
"f1_macro": 0.5583557193327258,
"f1_weighted": 0.618386260774831,
"precision": 0.625158519422246,
"recall": 0.5892689860021918
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 128,795,162 | 4,139,533 | 64 | 1175.42 | 544.46 | {
"accuracy": 0.8714037306354726,
"f1_macro": 0.8629328315448218,
"f1_weighted": 0.8714950054507163,
"precision": 0.8654034306031588,
"recall": 0.860973347921311
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 344,671,258 | 9,516,045 | 32 | 765.14 | 563.24 | {
"accuracy": 0.8773316471704078,
"f1_macro": 0.8708971632647742,
"f1_weighted": 0.8775584683512808,
"precision": 0.8730965955606386,
"recall": 0.8692390386775077
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 111,271,706 | 1,779,469 | 32 | 1042.97 | 498.7 | {
"accuracy": 0.7338760670249763,
"f1_macro": 0.6800897675258101,
"f1_weighted": 0.7154343448169183,
"precision": 0.7471161150451973,
"recall": 0.6864654554372182
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 132,334,106 | 7,678,477 | 128 | 1212.37 | 555.17 | {
"accuracy": 0.8763831805248182,
"f1_macro": 0.8685086985495007,
"f1_weighted": 0.8764768753528103,
"precision": 0.8706212561474461,
"recall": 0.86678235158233
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 427,999,053 | 20,644,672 | 64 | 4107.01 | 1845.35 | {
"accuracy": 0.8829434081568132,
"f1_macro": 0.8760990330726381,
"f1_weighted": 0.8830823425417631,
"precision": 0.878388529941251,
"recall": 0.874255558547274
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 354,173,978 | 19,018,765 | 64 | 825.99 | 584.63 | {
"accuracy": 0.880730319317104,
"f1_macro": 0.8745132364205285,
"f1_weighted": 0.8808686332353639,
"precision": 0.8761351934239846,
"recall": 0.8732603833811635
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 113,041,178 | 3,548,941 | 64 | 1039.2 | 506.87 | {
"accuracy": 0.7899936768890294,
"f1_macro": 0.76085900959247,
"f1_weighted": 0.7837586524219583,
"precision": 0.7904333025476757,
"recall": 0.7563708284429409
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 394,184,730 | 38,811,661 | 128 | 3649.15 | 1618.89 | {
"accuracy": 0.89756560227632,
"f1_macro": 0.8931155672712718,
"f1_weighted": 0.8977212994148122,
"precision": 0.8940294746357065,
"recall": 0.8924113005747072
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 139,411,994 | 14,756,365 | 256 | 1347.02 | 589.18 | {
"accuracy": 0.8800980082200442,
"f1_macro": 0.8728568319907553,
"f1_weighted": 0.8802014973485958,
"precision": 0.8744470212702262,
"recall": 0.8716389976774788
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 373,179,418 | 38,024,205 | 128 | 950.71 | 618.54 | {
"accuracy": 0.8842080303509326,
"f1_macro": 0.8784351126961792,
"f1_weighted": 0.884335007759456,
"precision": 0.8796394785271359,
"recall": 0.8775049888168863
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 116,580,122 | 7,087,885 | 128 | 1079.39 | 499.2 | {
"accuracy": 0.8262725260828327,
"f1_macro": 0.8044735145506146,
"f1_weighted": 0.8236107619440158,
"precision": 0.8181056541984145,
"recall": 0.7992784227671461
} |
FacebookAI/roberta-base | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 153,567,770 | 28,912,141 | 512 | 1549.04 | 642.82 | {
"accuracy": 0.8823110970597534,
"f1_macro": 0.8752709699074926,
"f1_weighted": 0.8824236522894506,
"precision": 0.8759907367639677,
"recall": 0.8747700003957696
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 579,841,050 | 19,937,293 | 64 | 5211.23 | 2092.91 | {
"accuracy": 0.8938507745810939,
"f1_macro": 0.8889839784997207,
"f1_weighted": 0.8939976610165367,
"precision": 0.8907862703375223,
"recall": 0.8874784323164315
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 123,658,010 | 14,165,773 | 256 | 1214.8 | 539.47 | {
"accuracy": 0.8452418589946253,
"f1_macro": 0.8309024960657764,
"f1_weighted": 0.8445499077974197,
"precision": 0.8380230325734844,
"recall": 0.8264247931667552
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 411,190,298 | 76,035,085 | 256 | 1209.98 | 696.93 | {
"accuracy": 0.8861840025292443,
"f1_macro": 0.8805852219073909,
"f1_weighted": 0.8862916923417463,
"precision": 0.8815998520714247,
"recall": 0.8798811297517187
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 590,224,922 | 14,289,549 | 32 | 5184.43 | 2675.98 | {
"accuracy": 0.8965380967435979,
"f1_macro": 0.8916888027728069,
"f1_weighted": 0.8967334029305609,
"precision": 0.8939975604190971,
"recall": 0.8897654555741151
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 448,643,725 | 41,289,344 | 128 | 4188.89 | 1954.45 | {
"accuracy": 0.8895036357888081,
"f1_macro": 0.8833073806422669,
"f1_weighted": 0.8896636096198612,
"precision": 0.884459853381671,
"recall": 0.8823866083639367
} |
google-bert/bert-base-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 137,813,786 | 28,321,549 | 512 | 1426.05 | 605.12 | {
"accuracy": 0.85883654758141,
"f1_macro": 0.848513365806343,
"f1_weighted": 0.8586877233616093,
"precision": 0.8537204028707317,
"recall": 0.8448018864192982
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 431,933,466 | 76,560,397 | 256 | 4148.11 | 1794.14 | {
"accuracy": 0.8996206133417641,
"f1_macro": 0.8952887187406523,
"f1_weighted": 0.8997428572588272,
"precision": 0.8962920572695677,
"recall": 0.894464186628896
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense"
] | 487,212,058 | 152,056,845 | 512 | 1703.09 | 864.14 | {
"accuracy": 0.8863420803035094,
"f1_macro": 0.8809090607240366,
"f1_weighted": 0.8864747176495101,
"precision": 0.8811480559502635,
"recall": 0.8809128972730303
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 335,758,362 | 603,149 | 4 | 917.44 | 546.92 | {
"accuracy": 0.887132469174834,
"f1_macro": 0.8817187487987473,
"f1_weighted": 0.8872365394026598,
"precision": 0.8824020330476059,
"recall": 0.8812280581703731
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 396,316,698 | 472,077 | 4 | 1834.11 | 786.35 | {
"accuracy": 0.7903098324375593,
"f1_macro": 0.7672491813607067,
"f1_weighted": 0.7886414085185387,
"precision": 0.7749200809729936,
"recall": 0.7629213053908442
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 598,715,418 | 38,811,661 | 128 | 5282.39 | 2191.98 | {
"accuracy": 0.896380018969333,
"f1_macro": 0.8918873284178506,
"f1_weighted": 0.8965179629653568,
"precision": 0.8931920068877847,
"recall": 0.8908658698341267
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 336,348,186 | 1,192,973 | 8 | 573.23 | 547.32 | {
"accuracy": 0.8872905469490989,
"f1_macro": 0.8820272426764626,
"f1_weighted": 0.8873896526508074,
"precision": 0.8828409744012747,
"recall": 0.8813973789041893
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 396,775,450 | 930,829 | 8 | 1840.15 | 789.62 | {
"accuracy": 0.8280904204868795,
"f1_macro": 0.8127439252127046,
"f1_weighted": 0.8277441204563218,
"precision": 0.8170094648812616,
"recall": 0.8095579199545097
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 337,527,834 | 2,372,621 | 16 | 581.2 | 548.24 | {
"accuracy": 0.8875276636104964,
"f1_macro": 0.8822620778352918,
"f1_weighted": 0.8876157933246032,
"precision": 0.8830131811926824,
"recall": 0.881689667346855
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 489,933,069 | 82,578,688 | 256 | 4775.35 | 2149.02 | {
"accuracy": 0.8957477078722732,
"f1_macro": 0.8909291187592895,
"f1_weighted": 0.8958900440066224,
"precision": 0.8917778185527342,
"recall": 0.8902739783190025
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 507,430,938 | 152,057,869 | 512 | 5208.02 | 2154.21 | {
"accuracy": 0.9023869743914006,
"f1_macro": 0.8975241027323866,
"f1_weighted": 0.902511886689775,
"precision": 0.8983383090095226,
"recall": 0.8968883767529492
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 604,499,482 | 28,564,109 | 64 | 5410.55 | 2756.43 | {
"accuracy": 0.8986721466961746,
"f1_macro": 0.8942493548634695,
"f1_weighted": 0.8988471414290032,
"precision": 0.8961804074671618,
"recall": 0.8926048577899264
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 339,887,130 | 4,731,917 | 32 | 597.12 | 551.81 | {
"accuracy": 0.8875276636104964,
"f1_macro": 0.8822690294260441,
"f1_weighted": 0.8876324867403612,
"precision": 0.8830941233036421,
"recall": 0.8816375818813812
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 397,692,954 | 1,848,333 | 16 | 1854.29 | 814.45 | {
"accuracy": 0.8442933923490358,
"f1_macro": 0.8317764391177607,
"f1_weighted": 0.8440530174431455,
"precision": 0.835826797307839,
"recall": 0.8286484142313467
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 344,605,722 | 9,450,509 | 64 | 629.12 | 559.8 | {
"accuracy": 0.887132469174834,
"f1_macro": 0.8818528902639416,
"f1_weighted": 0.8872188116550366,
"precision": 0.8824483754035293,
"recall": 0.8814262519162981
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 399,527,962 | 3,683,341 | 32 | 1890.68 | 792.05 | {
"accuracy": 0.8579671198229529,
"f1_macro": 0.8475435628380684,
"f1_weighted": 0.8578404191364576,
"precision": 0.8507979439495266,
"recall": 0.8449504002910871
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 636,464,154 | 76,560,397 | 256 | 5783.89 | 2368.27 | {
"accuracy": 0.8991463800189693,
"f1_macro": 0.8943440596520253,
"f1_weighted": 0.8992907008437808,
"precision": 0.8956325153176388,
"recall": 0.8932812078131077
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 357,025,818 | 1,652,749 | 4 | 3283.36 | 1447.14 | {
"accuracy": 0.866740436294657,
"f1_macro": 0.8575637104818205,
"f1_weighted": 0.8669708348834821,
"precision": 0.8608586794600368,
"recall": 0.8551682938094264
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 354,042,906 | 18,887,693 | 128 | 693.11 | 572.79 | {
"accuracy": 0.8870534302877016,
"f1_macro": 0.8818615284745714,
"f1_weighted": 0.8871316748564163,
"precision": 0.8823367797181898,
"recall": 0.8815199668222549
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 372,917,274 | 37,762,061 | 256 | 821.11 | 609.05 | {
"accuracy": 0.885946885867847,
"f1_macro": 0.8812526997751445,
"f1_weighted": 0.8859959148203225,
"precision": 0.8816106503696065,
"recall": 0.881007863260953
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 403,197,978 | 7,353,357 | 64 | 1944.59 | 803.9 | {
"accuracy": 0.8696648751185583,
"f1_macro": 0.8615520466335258,
"f1_weighted": 0.8696118331637468,
"precision": 0.8646678666301584,
"recall": 0.8590107351236551
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"fc1",
"fc2",
"out_proj"
] | 572,511,757 | 165,157,376 | 512 | 5814.55 | 2568.12 | {
"accuracy": 0.8993044577932343,
"f1_macro": 0.8948132965953287,
"f1_weighted": 0.8994168863709725,
"precision": 0.8956847616023478,
"recall": 0.8941679009907664
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"in_proj",
"pos_proj"
] | 408,016,922 | 1,790,989 | 4 | 3613.07 | 1898.71 | {
"accuracy": 0.8578090420486879,
"f1_macro": 0.8446092300628985,
"f1_weighted": 0.8578612785245953,
"precision": 0.8492567246489446,
"recall": 0.8418384523871402
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 357,615,642 | 2,242,573 | 8 | 3292.14 | 1435.24 | {
"accuracy": 0.8760670249762883,
"f1_macro": 0.8692705789045871,
"f1_weighted": 0.8764015848224459,
"precision": 0.8713756336119993,
"recall": 0.867732993727411
} |
google-bert/bert-large-uncased | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 410,666,010 | 75,510,797 | 512 | 1114.18 | 683.0 | {
"accuracy": 0.884603224786595,
"f1_macro": 0.878605207347677,
"f1_weighted": 0.8847180290634366,
"precision": 0.8782405610261594,
"recall": 0.8791753431973165
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 633,048,602 | 57,113,229 | 128 | 5810.97 | 2901.63 | {
"accuracy": 0.9019917799557382,
"f1_macro": 0.8982219370986022,
"f1_weighted": 0.9021507602038787,
"precision": 0.900625623362189,
"recall": 0.8961772457902
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 410,538,010 | 14,693,389 | 128 | 2042.92 | 831.97 | {
"accuracy": 0.874328169459374,
"f1_macro": 0.8665932773699195,
"f1_weighted": 0.8742417767025412,
"precision": 0.8692475565339852,
"recall": 0.8643949760993761
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"dense",
"out_proj"
] | 711,961,626 | 152,057,869 | 512 | 6843.98 | 2713.87 | {
"accuracy": 0.9010433133101486,
"f1_macro": 0.8969479131518704,
"f1_weighted": 0.9012152952855088,
"precision": 0.8976570979548679,
"recall": 0.8964894216574089
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 425,218,074 | 29,373,453 | 256 | 2248.69 | 874.61 | {
"accuracy": 0.8794656971229845,
"f1_macro": 0.8726974236966247,
"f1_weighted": 0.8794719106552626,
"precision": 0.8747160009190501,
"recall": 0.8710901196893861
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 358,795,290 | 3,422,221 | 16 | 3308.33 | 1440.69 | {
"accuracy": 0.8818368637369586,
"f1_macro": 0.875790887031754,
"f1_weighted": 0.8821591776492361,
"precision": 0.8772440147930095,
"recall": 0.8747788935196866
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"in_proj",
"pos_proj"
] | 409,794,586 | 3,568,653 | 8 | 3629.29 | 1895.48 | {
"accuracy": 0.874328169459374,
"f1_macro": 0.8641103102381869,
"f1_weighted": 0.8745478117117244,
"precision": 0.866397138011316,
"recall": 0.8629314782882648
} |
answerdotai/ModernBERT-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"Wqkv"
] | 454,578,202 | 58,733,581 | 512 | 2562.46 | 944.06 | {
"accuracy": 0.8840499525766677,
"f1_macro": 0.8772364746801417,
"f1_weighted": 0.8840641342001619,
"precision": 0.8791632119549534,
"recall": 0.8756725309481499
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"k_proj",
"q_proj",
"v_proj"
] | 408,239,117 | 884,736 | 4 | 3882.55 | 1795.46 | {
"accuracy": 0.25418906101802086,
"f1_macro": 0.16746654763773686,
"f1_weighted": 0.19839063988848946,
"precision": 0.22534217234371567,
"recall": 0.2100578649092377
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 361,154,586 | 5,781,517 | 32 | 3344.93 | 1466.0 | {
"accuracy": 0.8857888080935821,
"f1_macro": 0.8799275312566462,
"f1_weighted": 0.886121637308196,
"precision": 0.8808806803033844,
"recall": 0.8793853471902271
} |
google/rembert | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in"
] | 690,146,842 | 114,211,469 | 256 | 6479.82 | 3174.09 | {
"accuracy": 0.90396775213405,
"f1_macro": 0.9000784127377291,
"f1_weighted": 0.9041240754395038,
"precision": 0.901687898049922,
"recall": 0.8987227054048579
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 561,556,506 | 1,652,749 | 4 | 4920.26 | 1993.61 | {
"accuracy": 0.861681947518179,
"f1_macro": 0.8518543085684569,
"f1_weighted": 0.8616834766914389,
"precision": 0.8557438409368476,
"recall": 0.8491147729646762
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"in_proj",
"pos_proj"
] | 413,349,914 | 7,123,981 | 16 | 3671.11 | 1911.46 | {
"accuracy": 0.8819949415112235,
"f1_macro": 0.8740560385409323,
"f1_weighted": 0.8822275343532041,
"precision": 0.874914517996927,
"recall": 0.8736064228611521
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 365,873,178 | 10,500,109 | 64 | 3416.66 | 1495.35 | {
"accuracy": 0.8890294024660133,
"f1_macro": 0.8832128667872327,
"f1_weighted": 0.889326249515431,
"precision": 0.8837432896318163,
"recall": 0.8829964929542189
} |
facebook/bart-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"k_proj",
"q_proj",
"v_proj"
] | 409,123,853 | 1,769,472 | 8 | 3896.36 | 1772.8 | {
"accuracy": 0.6342870692380651,
"f1_macro": 0.5243534507803612,
"f1_weighted": 0.5969217399151321,
"precision": 0.5656195963958232,
"recall": 0.5562342366705734
} |
albert/albert-xxlarge-v2 | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"embedding_hidden_mapping_in",
"ffn",
"ffn_output",
"pooler"
] | 222,948,378 | 299,533 | 4 | 2293.62 | 3286.26 | {
"accuracy": 0.7384603224786594,
"f1_macro": 0.6632644496543156,
"f1_weighted": 0.7188668278957628,
"precision": 0.7557279016742403,
"recall": 0.6736103242013692
} |
FacebookAI/xlm-roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 562,146,330 | 2,242,573 | 8 | 4929.63 | 1982.89 | {
"accuracy": 0.8754347138792286,
"f1_macro": 0.8684851872815932,
"f1_weighted": 0.8756161000577907,
"precision": 0.8711312369445623,
"recall": 0.8663700730948646
} |
microsoft/deberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"classifier",
"dense",
"in_proj",
"pos_proj"
] | 420,460,570 | 14,234,637 | 32 | 3773.31 | 1937.34 | {
"accuracy": 0.8883970913689535,
"f1_macro": 0.881447477588251,
"f1_weighted": 0.8885959982355389,
"precision": 0.8823729100014337,
"recall": 0.8808481640861859
} |
FacebookAI/roberta-large | 50,775 | 12,652 | {
"auto_find_batch_size": true,
"gradient_accumulation_steps": 4,
"learning_rate": 0.00005,
"logging_steps": 1,
"lr_scheduler_type": "linear",
"num_train_epochs": 1,
"optim": "adamw_8bit",
"output_dir": "outputs",
"report_to": "none",
"save_strategy": "no",
"save_total_limit": 0,
"seed": 3407,
"warmup_steps": 5,
"weight_decay": 0.01
} | [
"key",
"query",
"value"
] | 375,310,362 | 19,937,293 | 128 | 3589.87 | 1523.41 | {
"accuracy": 0.8900569079987354,
"f1_macro": 0.8840841051769437,
"f1_weighted": 0.8903668026826507,
"precision": 0.8849892661664793,
"recall": 0.8835230328349574
} |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 21