Proactive-Interactive-R1-SFT-7B / trainer_state.json
{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 375,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008,
"grad_norm": 6.165421485900879,
"learning_rate": 0.0,
"loss": 2.3147,
"step": 1
},
{
"epoch": 0.016,
"grad_norm": 5.735071659088135,
"learning_rate": 8.333333333333333e-07,
"loss": 2.2195,
"step": 2
},
{
"epoch": 0.024,
"grad_norm": 6.245882987976074,
"learning_rate": 1.6666666666666667e-06,
"loss": 2.3793,
"step": 3
},
{
"epoch": 0.032,
"grad_norm": 6.186586856842041,
"learning_rate": 2.5e-06,
"loss": 2.2879,
"step": 4
},
{
"epoch": 0.04,
"grad_norm": 5.804585933685303,
"learning_rate": 3.3333333333333333e-06,
"loss": 2.2428,
"step": 5
},
{
"epoch": 0.048,
"grad_norm": 5.387149333953857,
"learning_rate": 4.166666666666667e-06,
"loss": 2.2197,
"step": 6
},
{
"epoch": 0.056,
"grad_norm": 4.933386325836182,
"learning_rate": 5e-06,
"loss": 2.0332,
"step": 7
},
{
"epoch": 0.064,
"grad_norm": 3.527738571166992,
"learning_rate": 5.833333333333334e-06,
"loss": 2.1247,
"step": 8
},
{
"epoch": 0.072,
"grad_norm": 3.6139931678771973,
"learning_rate": 6.666666666666667e-06,
"loss": 2.1694,
"step": 9
},
{
"epoch": 0.08,
"grad_norm": 2.918950080871582,
"learning_rate": 7.500000000000001e-06,
"loss": 2.1108,
"step": 10
},
{
"epoch": 0.088,
"grad_norm": 2.8728139400482178,
"learning_rate": 8.333333333333334e-06,
"loss": 2.0756,
"step": 11
},
{
"epoch": 0.096,
"grad_norm": 2.7820944786071777,
"learning_rate": 9.166666666666666e-06,
"loss": 2.0886,
"step": 12
},
{
"epoch": 0.104,
"grad_norm": 2.8887734413146973,
"learning_rate": 1e-05,
"loss": 1.9143,
"step": 13
},
{
"epoch": 0.112,
"grad_norm": 2.7269833087921143,
"learning_rate": 9.999812749151968e-06,
"loss": 1.7421,
"step": 14
},
{
"epoch": 0.12,
"grad_norm": 2.4814541339874268,
"learning_rate": 9.99925101063302e-06,
"loss": 1.9555,
"step": 15
},
{
"epoch": 0.128,
"grad_norm": 2.188281297683716,
"learning_rate": 9.998314826517564e-06,
"loss": 1.7679,
"step": 16
},
{
"epoch": 0.136,
"grad_norm": 2.072674036026001,
"learning_rate": 9.997004266926105e-06,
"loss": 1.8436,
"step": 17
},
{
"epoch": 0.144,
"grad_norm": 1.929784893989563,
"learning_rate": 9.995319430020004e-06,
"loss": 1.6908,
"step": 18
},
{
"epoch": 0.152,
"grad_norm": 1.532141089439392,
"learning_rate": 9.993260441994116e-06,
"loss": 1.8093,
"step": 19
},
{
"epoch": 0.16,
"grad_norm": 1.5804815292358398,
"learning_rate": 9.990827457067342e-06,
"loss": 1.8674,
"step": 20
},
{
"epoch": 0.168,
"grad_norm": 1.6191236972808838,
"learning_rate": 9.988020657471078e-06,
"loss": 1.8742,
"step": 21
},
{
"epoch": 0.176,
"grad_norm": 1.451058030128479,
"learning_rate": 9.984840253435569e-06,
"loss": 1.7627,
"step": 22
},
{
"epoch": 0.184,
"grad_norm": 1.297680139541626,
"learning_rate": 9.98128648317415e-06,
"loss": 1.7632,
"step": 23
},
{
"epoch": 0.192,
"grad_norm": 1.2567354440689087,
"learning_rate": 9.977359612865424e-06,
"loss": 1.7992,
"step": 24
},
{
"epoch": 0.2,
"grad_norm": 1.2900863885879517,
"learning_rate": 9.973059936633308e-06,
"loss": 1.7215,
"step": 25
},
{
"epoch": 0.208,
"grad_norm": 1.2776153087615967,
"learning_rate": 9.968387776525009e-06,
"loss": 1.7456,
"step": 26
},
{
"epoch": 0.216,
"grad_norm": 1.263024926185608,
"learning_rate": 9.963343482486907e-06,
"loss": 1.7512,
"step": 27
},
{
"epoch": 0.224,
"grad_norm": 1.2118524312973022,
"learning_rate": 9.957927432338332e-06,
"loss": 1.801,
"step": 28
},
{
"epoch": 0.232,
"grad_norm": 1.1955608129501343,
"learning_rate": 9.952140031743282e-06,
"loss": 1.7266,
"step": 29
},
{
"epoch": 0.24,
"grad_norm": 1.2853139638900757,
"learning_rate": 9.945981714180021e-06,
"loss": 1.8852,
"step": 30
},
{
"epoch": 0.248,
"grad_norm": 1.1693278551101685,
"learning_rate": 9.939452940908627e-06,
"loss": 1.6649,
"step": 31
},
{
"epoch": 0.256,
"grad_norm": 1.2286041975021362,
"learning_rate": 9.932554200936428e-06,
"loss": 1.7861,
"step": 32
},
{
"epoch": 0.264,
"grad_norm": 1.34682297706604,
"learning_rate": 9.925286010981394e-06,
"loss": 1.7849,
"step": 33
},
{
"epoch": 0.272,
"grad_norm": 1.2271584272384644,
"learning_rate": 9.917648915433413e-06,
"loss": 1.8268,
"step": 34
},
{
"epoch": 0.28,
"grad_norm": 1.2259082794189453,
"learning_rate": 9.909643486313533e-06,
"loss": 1.7176,
"step": 35
},
{
"epoch": 0.288,
"grad_norm": 1.1778770685195923,
"learning_rate": 9.901270323231114e-06,
"loss": 1.7909,
"step": 36
},
{
"epoch": 0.296,
"grad_norm": 1.1178228855133057,
"learning_rate": 9.892530053338909e-06,
"loss": 1.7618,
"step": 37
},
{
"epoch": 0.304,
"grad_norm": 1.1982990503311157,
"learning_rate": 9.883423331286096e-06,
"loss": 1.7316,
"step": 38
},
{
"epoch": 0.312,
"grad_norm": 1.0350874662399292,
"learning_rate": 9.873950839169248e-06,
"loss": 1.6342,
"step": 39
},
{
"epoch": 0.32,
"grad_norm": 1.188277244567871,
"learning_rate": 9.864113286481237e-06,
"loss": 1.7302,
"step": 40
},
{
"epoch": 0.328,
"grad_norm": 1.1097241640090942,
"learning_rate": 9.853911410058097e-06,
"loss": 1.5871,
"step": 41
},
{
"epoch": 0.336,
"grad_norm": 1.1486974954605103,
"learning_rate": 9.843345974023833e-06,
"loss": 1.7055,
"step": 42
},
{
"epoch": 0.344,
"grad_norm": 1.1390248537063599,
"learning_rate": 9.832417769733185e-06,
"loss": 1.725,
"step": 43
},
{
"epoch": 0.352,
"grad_norm": 1.1239397525787354,
"learning_rate": 9.821127615712365e-06,
"loss": 1.7716,
"step": 44
},
{
"epoch": 0.36,
"grad_norm": 1.095049262046814,
"learning_rate": 9.809476357597738e-06,
"loss": 1.6597,
"step": 45
},
{
"epoch": 0.368,
"grad_norm": 1.2124348878860474,
"learning_rate": 9.797464868072489e-06,
"loss": 1.7454,
"step": 46
},
{
"epoch": 0.376,
"grad_norm": 1.1528229713439941,
"learning_rate": 9.785094046801256e-06,
"loss": 1.813,
"step": 47
},
{
"epoch": 0.384,
"grad_norm": 1.1076029539108276,
"learning_rate": 9.77236482036275e-06,
"loss": 1.6276,
"step": 48
},
{
"epoch": 0.392,
"grad_norm": 1.1525791883468628,
"learning_rate": 9.759278142180348e-06,
"loss": 1.6772,
"step": 49
},
{
"epoch": 0.4,
"grad_norm": 1.1833432912826538,
"learning_rate": 9.745834992450688e-06,
"loss": 1.7564,
"step": 50
},
{
"epoch": 0.408,
"grad_norm": 1.220745325088501,
"learning_rate": 9.732036378070243e-06,
"loss": 1.7262,
"step": 51
},
{
"epoch": 0.416,
"grad_norm": 1.1543320417404175,
"learning_rate": 9.717883332559911e-06,
"loss": 1.5396,
"step": 52
},
{
"epoch": 0.424,
"grad_norm": 1.1147841215133667,
"learning_rate": 9.703376915987601e-06,
"loss": 1.7466,
"step": 53
},
{
"epoch": 0.432,
"grad_norm": 1.0171838998794556,
"learning_rate": 9.688518214888836e-06,
"loss": 1.6408,
"step": 54
},
{
"epoch": 0.44,
"grad_norm": 1.0049159526824951,
"learning_rate": 9.673308342185366e-06,
"loss": 1.5324,
"step": 55
},
{
"epoch": 0.448,
"grad_norm": 1.069504737854004,
"learning_rate": 9.657748437101819e-06,
"loss": 1.5823,
"step": 56
},
{
"epoch": 0.456,
"grad_norm": 1.0831701755523682,
"learning_rate": 9.641839665080363e-06,
"loss": 1.7311,
"step": 57
},
{
"epoch": 0.464,
"grad_norm": 1.0674666166305542,
"learning_rate": 9.625583217693419e-06,
"loss": 1.8164,
"step": 58
},
{
"epoch": 0.472,
"grad_norm": 1.0751819610595703,
"learning_rate": 9.60898031255441e-06,
"loss": 1.6059,
"step": 59
},
{
"epoch": 0.48,
"grad_norm": 1.0138529539108276,
"learning_rate": 9.592032193226564e-06,
"loss": 1.7617,
"step": 60
},
{
"epoch": 0.488,
"grad_norm": 1.0597162246704102,
"learning_rate": 9.574740129129767e-06,
"loss": 1.7207,
"step": 61
},
{
"epoch": 0.496,
"grad_norm": 1.0368247032165527,
"learning_rate": 9.557105415445485e-06,
"loss": 1.7482,
"step": 62
},
{
"epoch": 0.504,
"grad_norm": 1.0020555257797241,
"learning_rate": 9.539129373019755e-06,
"loss": 1.7692,
"step": 63
},
{
"epoch": 0.512,
"grad_norm": 1.0732332468032837,
"learning_rate": 9.520813348264252e-06,
"loss": 1.737,
"step": 64
},
{
"epoch": 0.52,
"grad_norm": 0.9923004508018494,
"learning_rate": 9.502158713055444e-06,
"loss": 1.6523,
"step": 65
},
{
"epoch": 0.528,
"grad_norm": 1.0406478643417358,
"learning_rate": 9.483166864631837e-06,
"loss": 1.7734,
"step": 66
},
{
"epoch": 0.536,
"grad_norm": 1.030851125717163,
"learning_rate": 9.46383922548932e-06,
"loss": 1.6183,
"step": 67
},
{
"epoch": 0.544,
"grad_norm": 1.0091995000839233,
"learning_rate": 9.444177243274619e-06,
"loss": 1.6967,
"step": 68
},
{
"epoch": 0.552,
"grad_norm": 1.0016194581985474,
"learning_rate": 9.424182390676872e-06,
"loss": 1.5285,
"step": 69
},
{
"epoch": 0.56,
"grad_norm": 1.0569149255752563,
"learning_rate": 9.403856165317322e-06,
"loss": 1.7399,
"step": 70
},
{
"epoch": 0.568,
"grad_norm": 1.0487375259399414,
"learning_rate": 9.383200089637143e-06,
"loss": 1.6845,
"step": 71
},
{
"epoch": 0.576,
"grad_norm": 1.0257350206375122,
"learning_rate": 9.362215710783411e-06,
"loss": 1.6052,
"step": 72
},
{
"epoch": 0.584,
"grad_norm": 0.9415053129196167,
"learning_rate": 9.34090460049322e-06,
"loss": 1.6624,
"step": 73
},
{
"epoch": 0.592,
"grad_norm": 0.982798159122467,
"learning_rate": 9.319268354975958e-06,
"loss": 1.581,
"step": 74
},
{
"epoch": 0.6,
"grad_norm": 1.0241587162017822,
"learning_rate": 9.297308594793757e-06,
"loss": 1.6495,
"step": 75
},
{
"epoch": 0.608,
"grad_norm": 1.0798351764678955,
"learning_rate": 9.275026964740101e-06,
"loss": 1.6056,
"step": 76
},
{
"epoch": 0.616,
"grad_norm": 1.0368587970733643,
"learning_rate": 9.252425133716639e-06,
"loss": 1.679,
"step": 77
},
{
"epoch": 0.624,
"grad_norm": 1.019658088684082,
"learning_rate": 9.229504794608182e-06,
"loss": 1.6168,
"step": 78
},
{
"epoch": 0.632,
"grad_norm": 0.9923626184463501,
"learning_rate": 9.206267664155906e-06,
"loss": 1.6812,
"step": 79
},
{
"epoch": 0.64,
"grad_norm": 0.9651780724525452,
"learning_rate": 9.182715482828764e-06,
"loss": 1.6214,
"step": 80
},
{
"epoch": 0.648,
"grad_norm": 1.065513014793396,
"learning_rate": 9.158850014693123e-06,
"loss": 1.6537,
"step": 81
},
{
"epoch": 0.656,
"grad_norm": 0.9438872933387756,
"learning_rate": 9.134673047280644e-06,
"loss": 1.602,
"step": 82
},
{
"epoch": 0.664,
"grad_norm": 1.0228830575942993,
"learning_rate": 9.110186391454389e-06,
"loss": 1.7139,
"step": 83
},
{
"epoch": 0.672,
"grad_norm": 1.0108044147491455,
"learning_rate": 9.085391881273182e-06,
"loss": 1.6621,
"step": 84
},
{
"epoch": 0.68,
"grad_norm": 0.9967405200004578,
"learning_rate": 9.060291373854252e-06,
"loss": 1.6323,
"step": 85
},
{
"epoch": 0.688,
"grad_norm": 1.0818958282470703,
"learning_rate": 9.034886749234112e-06,
"loss": 1.6435,
"step": 86
},
{
"epoch": 0.696,
"grad_norm": 1.012364149093628,
"learning_rate": 9.009179910227767e-06,
"loss": 1.6756,
"step": 87
},
{
"epoch": 0.704,
"grad_norm": 1.084316372871399,
"learning_rate": 8.98317278228618e-06,
"loss": 1.6813,
"step": 88
},
{
"epoch": 0.712,
"grad_norm": 1.0937138795852661,
"learning_rate": 8.956867313352055e-06,
"loss": 1.6627,
"step": 89
},
{
"epoch": 0.72,
"grad_norm": 1.081781268119812,
"learning_rate": 8.930265473713939e-06,
"loss": 1.8571,
"step": 90
},
{
"epoch": 0.728,
"grad_norm": 0.9865168929100037,
"learning_rate": 8.90336925585864e-06,
"loss": 1.4989,
"step": 91
},
{
"epoch": 0.736,
"grad_norm": 1.0438846349716187,
"learning_rate": 8.876180674322006e-06,
"loss": 1.6913,
"step": 92
},
{
"epoch": 0.744,
"grad_norm": 1.0505565404891968,
"learning_rate": 8.84870176553801e-06,
"loss": 1.6397,
"step": 93
},
{
"epoch": 0.752,
"grad_norm": 0.908542275428772,
"learning_rate": 8.820934587686247e-06,
"loss": 1.5152,
"step": 94
},
{
"epoch": 0.76,
"grad_norm": 1.0538814067840576,
"learning_rate": 8.792881220537752e-06,
"loss": 1.5435,
"step": 95
},
{
"epoch": 0.768,
"grad_norm": 1.0938372611999512,
"learning_rate": 8.764543765299245e-06,
"loss": 1.5283,
"step": 96
},
{
"epoch": 0.776,
"grad_norm": 1.1297836303710938,
"learning_rate": 8.735924344455732e-06,
"loss": 1.6756,
"step": 97
},
{
"epoch": 0.784,
"grad_norm": 0.9867210984230042,
"learning_rate": 8.707025101611546e-06,
"loss": 1.613,
"step": 98
},
{
"epoch": 0.792,
"grad_norm": 1.0464468002319336,
"learning_rate": 8.677848201329775e-06,
"loss": 1.5942,
"step": 99
},
{
"epoch": 0.8,
"grad_norm": 1.0653105974197388,
"learning_rate": 8.64839582897015e-06,
"loss": 1.7216,
"step": 100
},
{
"epoch": 0.808,
"grad_norm": 0.9832385778427124,
"learning_rate": 8.61867019052535e-06,
"loss": 1.4699,
"step": 101
},
{
"epoch": 0.816,
"grad_norm": 0.9331985116004944,
"learning_rate": 8.588673512455781e-06,
"loss": 1.7439,
"step": 102
},
{
"epoch": 0.824,
"grad_norm": 0.9516758322715759,
"learning_rate": 8.558408041522801e-06,
"loss": 1.696,
"step": 103
},
{
"epoch": 0.832,
"grad_norm": 1.0405412912368774,
"learning_rate": 8.527876044620453e-06,
"loss": 1.713,
"step": 104
},
{
"epoch": 0.84,
"grad_norm": 0.9289470314979553,
"learning_rate": 8.497079808605659e-06,
"loss": 1.7213,
"step": 105
},
{
"epoch": 0.848,
"grad_norm": 0.9201377034187317,
"learning_rate": 8.466021640126946e-06,
"loss": 1.6091,
"step": 106
},
{
"epoch": 0.856,
"grad_norm": 0.9733519554138184,
"learning_rate": 8.434703865451666e-06,
"loss": 1.6345,
"step": 107
},
{
"epoch": 0.864,
"grad_norm": 1.0086345672607422,
"learning_rate": 8.403128830291767e-06,
"loss": 1.6443,
"step": 108
},
{
"epoch": 0.872,
"grad_norm": 1.031723141670227,
"learning_rate": 8.371298899628091e-06,
"loss": 1.6869,
"step": 109
},
{
"epoch": 0.88,
"grad_norm": 0.98936527967453,
"learning_rate": 8.339216457533244e-06,
"loss": 1.5866,
"step": 110
},
{
"epoch": 0.888,
"grad_norm": 0.962455689907074,
"learning_rate": 8.306883906993022e-06,
"loss": 1.6483,
"step": 111
},
{
"epoch": 0.896,
"grad_norm": 0.9804932475090027,
"learning_rate": 8.274303669726427e-06,
"loss": 1.594,
"step": 112
},
{
"epoch": 0.904,
"grad_norm": 0.9484031200408936,
"learning_rate": 8.24147818600428e-06,
"loss": 1.4755,
"step": 113
},
{
"epoch": 0.912,
"grad_norm": 1.0301589965820312,
"learning_rate": 8.20840991446645e-06,
"loss": 1.6598,
"step": 114
},
{
"epoch": 0.92,
"grad_norm": 0.9916971325874329,
"learning_rate": 8.175101331937692e-06,
"loss": 1.5304,
"step": 115
},
{
"epoch": 0.928,
"grad_norm": 0.9457253813743591,
"learning_rate": 8.141554933242135e-06,
"loss": 1.6354,
"step": 116
},
{
"epoch": 0.936,
"grad_norm": 1.002668023109436,
"learning_rate": 8.10777323101642e-06,
"loss": 1.5989,
"step": 117
},
{
"epoch": 0.944,
"grad_norm": 1.0599579811096191,
"learning_rate": 8.073758755521506e-06,
"loss": 1.6564,
"step": 118
},
{
"epoch": 0.952,
"grad_norm": 0.9575379490852356,
"learning_rate": 8.03951405445314e-06,
"loss": 1.5884,
"step": 119
},
{
"epoch": 0.96,
"grad_norm": 0.8948677182197571,
"learning_rate": 8.005041692751055e-06,
"loss": 1.5128,
"step": 120
},
{
"epoch": 0.968,
"grad_norm": 0.9918161034584045,
"learning_rate": 7.970344252406832e-06,
"loss": 1.7014,
"step": 121
},
{
"epoch": 0.976,
"grad_norm": 1.0054118633270264,
"learning_rate": 7.935424332270523e-06,
"loss": 1.6341,
"step": 122
},
{
"epoch": 0.984,
"grad_norm": 0.9967445731163025,
"learning_rate": 7.900284547855992e-06,
"loss": 1.6521,
"step": 123
},
{
"epoch": 0.992,
"grad_norm": 1.0111421346664429,
"learning_rate": 7.864927531145012e-06,
"loss": 1.7013,
"step": 124
},
{
"epoch": 1.0,
"grad_norm": 0.9393289685249329,
"learning_rate": 7.829355930390126e-06,
"loss": 1.5216,
"step": 125
},
{
"epoch": 1.008,
"grad_norm": 0.9452322125434875,
"learning_rate": 7.7935724099163e-06,
"loss": 1.4109,
"step": 126
},
{
"epoch": 1.016,
"grad_norm": 1.041080355644226,
"learning_rate": 7.757579649921354e-06,
"loss": 1.4167,
"step": 127
},
{
"epoch": 1.024,
"grad_norm": 1.0035260915756226,
"learning_rate": 7.721380346275221e-06,
"loss": 1.4185,
"step": 128
},
{
"epoch": 1.032,
"grad_norm": 0.9582903385162354,
"learning_rate": 7.684977210318024e-06,
"loss": 1.5076,
"step": 129
},
{
"epoch": 1.04,
"grad_norm": 0.9640058279037476,
"learning_rate": 7.648372968656995e-06,
"loss": 1.4809,
"step": 130
},
{
"epoch": 1.048,
"grad_norm": 0.9298117160797119,
"learning_rate": 7.611570362962247e-06,
"loss": 1.4613,
"step": 131
},
{
"epoch": 1.056,
"grad_norm": 0.922681450843811,
"learning_rate": 7.574572149761437e-06,
"loss": 1.3367,
"step": 132
},
{
"epoch": 1.064,
"grad_norm": 0.9627285599708557,
"learning_rate": 7.5373811002332785e-06,
"loss": 1.3333,
"step": 133
},
{
"epoch": 1.072,
"grad_norm": 0.9147132635116577,
"learning_rate": 7.500000000000001e-06,
"loss": 1.2811,
"step": 134
},
{
"epoch": 1.08,
"grad_norm": 0.942388117313385,
"learning_rate": 7.462431648918689e-06,
"loss": 1.2576,
"step": 135
},
{
"epoch": 1.088,
"grad_norm": 0.9468157291412354,
"learning_rate": 7.424678860871584e-06,
"loss": 1.3759,
"step": 136
},
{
"epoch": 1.096,
"grad_norm": 0.8806234002113342,
"learning_rate": 7.3867444635553165e-06,
"loss": 1.2774,
"step": 137
},
{
"epoch": 1.104,
"grad_norm": 0.962507426738739,
"learning_rate": 7.3486312982691134e-06,
"loss": 1.346,
"step": 138
},
{
"epoch": 1.112,
"grad_norm": 0.9227285385131836,
"learning_rate": 7.310342219701981e-06,
"loss": 1.3977,
"step": 139
},
{
"epoch": 1.12,
"grad_norm": 0.9752129316329956,
"learning_rate": 7.271880095718895e-06,
"loss": 1.3203,
"step": 140
},
{
"epoch": 1.1280000000000001,
"grad_norm": 0.9896626472473145,
"learning_rate": 7.233247807145989e-06,
"loss": 1.3563,
"step": 141
},
{
"epoch": 1.1360000000000001,
"grad_norm": 0.9603149890899658,
"learning_rate": 7.19444824755478e-06,
"loss": 1.4031,
"step": 142
},
{
"epoch": 1.144,
"grad_norm": 0.9318594336509705,
"learning_rate": 7.155484323045442e-06,
"loss": 1.3123,
"step": 143
},
{
"epoch": 1.152,
"grad_norm": 0.9754460453987122,
"learning_rate": 7.11635895202914e-06,
"loss": 1.3359,
"step": 144
},
{
"epoch": 1.16,
"grad_norm": 0.9400285482406616,
"learning_rate": 7.0770750650094335e-06,
"loss": 1.4203,
"step": 145
},
{
"epoch": 1.168,
"grad_norm": 0.9439907073974609,
"learning_rate": 7.037635604362786e-06,
"loss": 1.3475,
"step": 146
},
{
"epoch": 1.176,
"grad_norm": 0.9856473803520203,
"learning_rate": 6.9980435241181785e-06,
"loss": 1.3728,
"step": 147
},
{
"epoch": 1.184,
"grad_norm": 1.0352829694747925,
"learning_rate": 6.958301789735853e-06,
"loss": 1.4341,
"step": 148
},
{
"epoch": 1.192,
"grad_norm": 1.0069676637649536,
"learning_rate": 6.918413377885193e-06,
"loss": 1.4365,
"step": 149
},
{
"epoch": 1.2,
"grad_norm": 0.9149236679077148,
"learning_rate": 6.878381276221777e-06,
"loss": 1.3273,
"step": 150
},
{
"epoch": 1.208,
"grad_norm": 0.9450361132621765,
"learning_rate": 6.838208483163601e-06,
"loss": 1.3297,
"step": 151
},
{
"epoch": 1.216,
"grad_norm": 1.0741515159606934,
"learning_rate": 6.797898007666493e-06,
"loss": 1.3977,
"step": 152
},
{
"epoch": 1.224,
"grad_norm": 0.9067620038986206,
"learning_rate": 6.757452868998737e-06,
"loss": 1.2802,
"step": 153
},
{
"epoch": 1.232,
"grad_norm": 0.9164926409721375,
"learning_rate": 6.716876096514944e-06,
"loss": 1.3741,
"step": 154
},
{
"epoch": 1.24,
"grad_norm": 0.9673436880111694,
"learning_rate": 6.676170729429132e-06,
"loss": 1.2866,
"step": 155
},
{
"epoch": 1.248,
"grad_norm": 0.8857837915420532,
"learning_rate": 6.635339816587109e-06,
"loss": 1.3524,
"step": 156
},
{
"epoch": 1.256,
"grad_norm": 1.0112680196762085,
"learning_rate": 6.594386416238095e-06,
"loss": 1.4385,
"step": 157
},
{
"epoch": 1.264,
"grad_norm": 0.9922051429748535,
"learning_rate": 6.553313595805666e-06,
"loss": 1.3711,
"step": 158
},
{
"epoch": 1.272,
"grad_norm": 0.9454922676086426,
"learning_rate": 6.512124431658006e-06,
"loss": 1.392,
"step": 159
},
{
"epoch": 1.28,
"grad_norm": 0.9144309759140015,
"learning_rate": 6.470822008877482e-06,
"loss": 1.3345,
"step": 160
},
{
"epoch": 1.288,
"grad_norm": 1.0063748359680176,
"learning_rate": 6.4294094210295725e-06,
"loss": 1.4048,
"step": 161
},
{
"epoch": 1.296,
"grad_norm": 1.0005673170089722,
"learning_rate": 6.3878897699311525e-06,
"loss": 1.4097,
"step": 162
},
{
"epoch": 1.304,
"grad_norm": 0.9271945357322693,
"learning_rate": 6.346266165418173e-06,
"loss": 1.3149,
"step": 163
},
{
"epoch": 1.312,
"grad_norm": 0.997925341129303,
"learning_rate": 6.304541725112734e-06,
"loss": 1.4207,
"step": 164
},
{
"epoch": 1.32,
"grad_norm": 1.0391231775283813,
"learning_rate": 6.262719574189564e-06,
"loss": 1.4244,
"step": 165
},
{
"epoch": 1.328,
"grad_norm": 1.0015145540237427,
"learning_rate": 6.2208028451419575e-06,
"loss": 1.2237,
"step": 166
},
{
"epoch": 1.336,
"grad_norm": 0.9355135560035706,
"learning_rate": 6.178794677547138e-06,
"loss": 1.3877,
"step": 167
},
{
"epoch": 1.3439999999999999,
"grad_norm": 0.9757426381111145,
"learning_rate": 6.136698217831106e-06,
"loss": 1.4125,
"step": 168
},
{
"epoch": 1.3519999999999999,
"grad_norm": 0.9164122939109802,
"learning_rate": 6.094516619032975e-06,
"loss": 1.3165,
"step": 169
},
{
"epoch": 1.3599999999999999,
"grad_norm": 0.902611255645752,
"learning_rate": 6.052253040568804e-06,
"loss": 1.2087,
"step": 170
},
{
"epoch": 1.3679999999999999,
"grad_norm": 0.9562743902206421,
"learning_rate": 6.009910647994956e-06,
"loss": 1.298,
"step": 171
},
{
"epoch": 1.376,
"grad_norm": 0.9646859169006348,
"learning_rate": 5.967492612770999e-06,
"loss": 1.478,
"step": 172
},
{
"epoch": 1.384,
"grad_norm": 0.9848045110702515,
"learning_rate": 5.925002112022158e-06,
"loss": 1.3841,
"step": 173
},
{
"epoch": 1.392,
"grad_norm": 0.9551551342010498,
"learning_rate": 5.882442328301356e-06,
"loss": 1.3204,
"step": 174
},
{
"epoch": 1.4,
"grad_norm": 0.9004006385803223,
"learning_rate": 5.839816449350824e-06,
"loss": 1.2993,
"step": 175
},
{
"epoch": 1.408,
"grad_norm": 0.9321911334991455,
"learning_rate": 5.7971276678633625e-06,
"loss": 1.3931,
"step": 176
},
{
"epoch": 1.416,
"grad_norm": 0.9299284219741821,
"learning_rate": 5.754379181243179e-06,
"loss": 1.3798,
"step": 177
},
{
"epoch": 1.424,
"grad_norm": 0.9809712171554565,
"learning_rate": 5.711574191366427e-06,
"loss": 1.3581,
"step": 178
},
{
"epoch": 1.432,
"grad_norm": 0.9836567640304565,
"learning_rate": 5.668715904341365e-06,
"loss": 1.333,
"step": 179
},
{
"epoch": 1.44,
"grad_norm": 0.9797928333282471,
"learning_rate": 5.62580753026823e-06,
"loss": 1.369,
"step": 180
},
{
"epoch": 1.448,
"grad_norm": 0.8885632753372192,
"learning_rate": 5.5828522829987965e-06,
"loss": 1.207,
"step": 181
},
{
"epoch": 1.456,
"grad_norm": 0.9868252277374268,
"learning_rate": 5.539853379895656e-06,
"loss": 1.3628,
"step": 182
},
{
"epoch": 1.464,
"grad_norm": 0.9249109029769897,
"learning_rate": 5.496814041591234e-06,
"loss": 1.3174,
"step": 183
},
{
"epoch": 1.472,
"grad_norm": 0.9424538612365723,
"learning_rate": 5.453737491746572e-06,
"loss": 1.2956,
"step": 184
},
{
"epoch": 1.48,
"grad_norm": 1.0274248123168945,
"learning_rate": 5.410626956809864e-06,
"loss": 1.4769,
"step": 185
},
{
"epoch": 1.488,
"grad_norm": 0.9153403043746948,
"learning_rate": 5.367485665774802e-06,
"loss": 1.2785,
"step": 186
},
{
"epoch": 1.496,
"grad_norm": 0.9761044383049011,
"learning_rate": 5.324316849938715e-06,
"loss": 1.3007,
"step": 187
},
{
"epoch": 1.504,
"grad_norm": 0.9353325366973877,
"learning_rate": 5.281123742660558e-06,
"loss": 1.3652,
"step": 188
},
{
"epoch": 1.512,
"grad_norm": 1.0336346626281738,
"learning_rate": 5.237909579118713e-06,
"loss": 1.4483,
"step": 189
},
{
"epoch": 1.52,
"grad_norm": 0.968720555305481,
"learning_rate": 5.194677596068689e-06,
"loss": 1.3331,
"step": 190
},
{
"epoch": 1.528,
"grad_norm": 0.9536193609237671,
"learning_rate": 5.1514310316006835e-06,
"loss": 1.3594,
"step": 191
},
{
"epoch": 1.536,
"grad_norm": 0.9640070199966431,
"learning_rate": 5.1081731248970435e-06,
"loss": 1.3753,
"step": 192
},
{
"epoch": 1.544,
"grad_norm": 0.8998177647590637,
"learning_rate": 5.064907115989655e-06,
"loss": 1.3207,
"step": 193
},
{
"epoch": 1.552,
"grad_norm": 1.0492256879806519,
"learning_rate": 5.021636245517261e-06,
"loss": 1.2969,
"step": 194
},
{
"epoch": 1.56,
"grad_norm": 0.9381954073905945,
"learning_rate": 4.978363754482741e-06,
"loss": 1.3814,
"step": 195
},
{
"epoch": 1.568,
"grad_norm": 0.9293028712272644,
"learning_rate": 4.935092884010347e-06,
"loss": 1.3583,
"step": 196
},
{
"epoch": 1.576,
"grad_norm": 1.0378097295761108,
"learning_rate": 4.891826875102958e-06,
"loss": 1.4385,
"step": 197
},
{
"epoch": 1.584,
"grad_norm": 0.9952184557914734,
"learning_rate": 4.848568968399317e-06,
"loss": 1.4347,
"step": 198
},
{
"epoch": 1.592,
"grad_norm": 0.9569420218467712,
"learning_rate": 4.805322403931312e-06,
"loss": 1.3173,
"step": 199
},
{
"epoch": 1.6,
"grad_norm": 1.0088790655136108,
"learning_rate": 4.762090420881289e-06,
"loss": 1.4581,
"step": 200
},
{
"epoch": 1.608,
"grad_norm": 0.9280454516410828,
"learning_rate": 4.718876257339444e-06,
"loss": 1.3553,
"step": 201
},
{
"epoch": 1.616,
"grad_norm": 0.9970977306365967,
"learning_rate": 4.6756831500612846e-06,
"loss": 1.2876,
"step": 202
},
{
"epoch": 1.624,
"grad_norm": 0.9597547650337219,
"learning_rate": 4.632514334225201e-06,
"loss": 1.4021,
"step": 203
},
{
"epoch": 1.6320000000000001,
"grad_norm": 1.0041824579238892,
"learning_rate": 4.589373043190137e-06,
"loss": 1.492,
"step": 204
},
{
"epoch": 1.6400000000000001,
"grad_norm": 0.9870665669441223,
"learning_rate": 4.546262508253429e-06,
"loss": 1.3503,
"step": 205
},
{
"epoch": 1.6480000000000001,
"grad_norm": 0.9604246616363525,
"learning_rate": 4.503185958408767e-06,
"loss": 1.3706,
"step": 206
},
{
"epoch": 1.6560000000000001,
"grad_norm": 1.0384790897369385,
"learning_rate": 4.460146620104347e-06,
"loss": 1.2973,
"step": 207
},
{
"epoch": 1.6640000000000001,
"grad_norm": 0.9975892901420593,
"learning_rate": 4.417147717001205e-06,
"loss": 1.3999,
"step": 208
},
{
"epoch": 1.6720000000000002,
"grad_norm": 0.9257301092147827,
"learning_rate": 4.374192469731771e-06,
"loss": 1.3405,
"step": 209
},
{
"epoch": 1.6800000000000002,
"grad_norm": 0.914090633392334,
"learning_rate": 4.331284095658637e-06,
"loss": 1.2838,
"step": 210
},
{
"epoch": 1.688,
"grad_norm": 0.9512580633163452,
"learning_rate": 4.2884258086335755e-06,
"loss": 1.3207,
"step": 211
},
{
"epoch": 1.696,
"grad_norm": 0.92391437292099,
"learning_rate": 4.245620818756822e-06,
"loss": 1.3182,
"step": 212
},
{
"epoch": 1.704,
"grad_norm": 0.947001576423645,
"learning_rate": 4.202872332136639e-06,
"loss": 1.299,
"step": 213
},
{
"epoch": 1.712,
"grad_norm": 0.9795234799385071,
"learning_rate": 4.160183550649176e-06,
"loss": 1.3005,
"step": 214
},
{
"epoch": 1.72,
"grad_norm": 0.9202683568000793,
"learning_rate": 4.117557671698648e-06,
"loss": 1.2494,
"step": 215
},
{
"epoch": 1.728,
"grad_norm": 0.927078127861023,
"learning_rate": 4.074997887977843e-06,
"loss": 1.4299,
"step": 216
},
{
"epoch": 1.736,
"grad_norm": 1.0304216146469116,
"learning_rate": 4.032507387229002e-06,
"loss": 1.379,
"step": 217
},
{
"epoch": 1.744,
"grad_norm": 0.9599056839942932,
"learning_rate": 3.9900893520050446e-06,
"loss": 1.3061,
"step": 218
},
{
"epoch": 1.752,
"grad_norm": 1.0049461126327515,
"learning_rate": 3.9477469594311975e-06,
"loss": 1.3921,
"step": 219
},
{
"epoch": 1.76,
"grad_norm": 0.9615343809127808,
"learning_rate": 3.905483380967027e-06,
"loss": 1.3521,
"step": 220
},
{
"epoch": 1.768,
"grad_norm": 0.900286853313446,
"learning_rate": 3.863301782168896e-06,
"loss": 1.3919,
"step": 221
},
{
"epoch": 1.776,
"grad_norm": 0.9626889228820801,
"learning_rate": 3.821205322452863e-06,
"loss": 1.4446,
"step": 222
},
{
"epoch": 1.784,
"grad_norm": 0.9698411822319031,
"learning_rate": 3.779197154858044e-06,
"loss": 1.4121,
"step": 223
},
{
"epoch": 1.792,
"grad_norm": 0.9630154967308044,
"learning_rate": 3.7372804258104367e-06,
"loss": 1.3486,
"step": 224
},
{
"epoch": 1.8,
"grad_norm": 0.9816241264343262,
"learning_rate": 3.695458274887268e-06,
"loss": 1.3536,
"step": 225
},
{
"epoch": 1.808,
"grad_norm": 0.9554859399795532,
"learning_rate": 3.6537338345818273e-06,
"loss": 1.3906,
"step": 226
},
{
"epoch": 1.8159999999999998,
"grad_norm": 0.9753232598304749,
"learning_rate": 3.6121102300688504e-06,
"loss": 1.4416,
"step": 227
},
{
"epoch": 1.8239999999999998,
"grad_norm": 1.023533821105957,
"learning_rate": 3.5705905789704296e-06,
"loss": 1.4409,
"step": 228
},
{
"epoch": 1.8319999999999999,
"grad_norm": 0.9600017070770264,
"learning_rate": 3.529177991122519e-06,
"loss": 1.4683,
"step": 229
},
{
"epoch": 1.8399999999999999,
"grad_norm": 0.9704511761665344,
"learning_rate": 3.487875568341995e-06,
"loss": 1.3358,
"step": 230
},
{
"epoch": 1.8479999999999999,
"grad_norm": 0.9900558590888977,
"learning_rate": 3.446686404194337e-06,
"loss": 1.441,
"step": 231
},
{
"epoch": 1.8559999999999999,
"grad_norm": 1.0063139200210571,
"learning_rate": 3.4056135837619077e-06,
"loss": 1.3806,
"step": 232
},
{
"epoch": 1.8639999999999999,
"grad_norm": 0.9783025979995728,
"learning_rate": 3.3646601834128924e-06,
"loss": 1.2891,
"step": 233
},
{
"epoch": 1.8719999999999999,
"grad_norm": 0.9990521669387817,
"learning_rate": 3.3238292705708675e-06,
"loss": 1.3966,
"step": 234
},
{
"epoch": 1.88,
"grad_norm": 0.9336013197898865,
"learning_rate": 3.2831239034850593e-06,
"loss": 1.3219,
"step": 235
},
{
"epoch": 1.888,
"grad_norm": 1.0710481405258179,
"learning_rate": 3.2425471310012645e-06,
"loss": 1.5404,
"step": 236
},
{
"epoch": 1.896,
"grad_norm": 0.9238635897636414,
"learning_rate": 3.2021019923335093e-06,
"loss": 1.2135,
"step": 237
},
{
"epoch": 1.904,
"grad_norm": 0.9419047236442566,
"learning_rate": 3.1617915168363994e-06,
"loss": 1.2094,
"step": 238
},
{
"epoch": 1.912,
"grad_norm": 0.9390449523925781,
"learning_rate": 3.121618723778225e-06,
"loss": 1.3233,
"step": 239
},
{
"epoch": 1.92,
"grad_norm": 0.9554815292358398,
"learning_rate": 3.081586622114809e-06,
"loss": 1.3909,
"step": 240
},
{
"epoch": 1.928,
"grad_norm": 0.9486646056175232,
"learning_rate": 3.041698210264149e-06,
"loss": 1.3041,
"step": 241
},
{
"epoch": 1.936,
"grad_norm": 0.9331883788108826,
"learning_rate": 3.001956475881822e-06,
"loss": 1.256,
"step": 242
},
{
"epoch": 1.944,
"grad_norm": 0.9461354613304138,
"learning_rate": 2.962364395637216e-06,
"loss": 1.4119,
"step": 243
},
{
"epoch": 1.952,
"grad_norm": 0.9538270831108093,
"learning_rate": 2.9229249349905686e-06,
"loss": 1.238,
"step": 244
},
{
"epoch": 1.96,
"grad_norm": 0.9340187907218933,
"learning_rate": 2.8836410479708625e-06,
"loss": 1.3517,
"step": 245
},
{
"epoch": 1.968,
"grad_norm": 1.0196490287780762,
"learning_rate": 2.84451567695456e-06,
"loss": 1.2968,
"step": 246
},
{
"epoch": 1.976,
"grad_norm": 1.013819694519043,
"learning_rate": 2.805551752445222e-06,
"loss": 1.3458,
"step": 247
},
{
"epoch": 1.984,
"grad_norm": 0.9020823240280151,
"learning_rate": 2.766752192854012e-06,
"loss": 1.424,
"step": 248
},
{
"epoch": 1.992,
"grad_norm": 0.8842499852180481,
"learning_rate": 2.728119904281105e-06,
"loss": 1.2664,
"step": 249
},
{
"epoch": 2.0,
"grad_norm": 0.9340562224388123,
"learning_rate": 2.689657780298019e-06,
"loss": 1.3434,
"step": 250
},
{
"epoch": 2.008,
"grad_norm": 1.005499005317688,
"learning_rate": 2.651368701730889e-06,
"loss": 1.2695,
"step": 251
},
{
"epoch": 2.016,
"grad_norm": 0.9357620477676392,
"learning_rate": 2.6132555364446856e-06,
"loss": 1.1521,
"step": 252
},
{
"epoch": 2.024,
"grad_norm": 0.9387351870536804,
"learning_rate": 2.5753211391284172e-06,
"loss": 1.2569,
"step": 253
},
{
"epoch": 2.032,
"grad_norm": 0.9448952674865723,
"learning_rate": 2.537568351081311e-06,
"loss": 1.1739,
"step": 254
},
{
"epoch": 2.04,
"grad_norm": 0.895693302154541,
"learning_rate": 2.5000000000000015e-06,
"loss": 1.1113,
"step": 255
},
{
"epoch": 2.048,
"grad_norm": 0.9334677457809448,
"learning_rate": 2.4626188997667224e-06,
"loss": 1.2154,
"step": 256
},
{
"epoch": 2.056,
"grad_norm": 0.968984067440033,
"learning_rate": 2.425427850238565e-06,
"loss": 1.1986,
"step": 257
},
{
"epoch": 2.064,
"grad_norm": 0.8969867825508118,
"learning_rate": 2.388429637037753e-06,
"loss": 1.1513,
"step": 258
},
{
"epoch": 2.072,
"grad_norm": 0.9212546348571777,
"learning_rate": 2.3516270313430085e-06,
"loss": 1.1515,
"step": 259
},
{
"epoch": 2.08,
"grad_norm": 0.9310314059257507,
"learning_rate": 2.3150227896819782e-06,
"loss": 1.1398,
"step": 260
},
{
"epoch": 2.088,
"grad_norm": 0.8860301971435547,
"learning_rate": 2.278619653724781e-06,
"loss": 1.2116,
"step": 261
},
{
"epoch": 2.096,
"grad_norm": 0.9003278613090515,
"learning_rate": 2.2424203500786473e-06,
"loss": 1.1741,
"step": 262
},
{
"epoch": 2.104,
"grad_norm": 0.9402487874031067,
"learning_rate": 2.206427590083703e-06,
"loss": 1.2057,
"step": 263
},
{
"epoch": 2.112,
"grad_norm": 0.8900340795516968,
"learning_rate": 2.170644069609876e-06,
"loss": 1.1549,
"step": 264
},
{
"epoch": 2.12,
"grad_norm": 0.9224807620048523,
"learning_rate": 2.1350724688549906e-06,
"loss": 1.2437,
"step": 265
},
{
"epoch": 2.128,
"grad_norm": 0.9042741060256958,
"learning_rate": 2.09971545214401e-06,
"loss": 1.1539,
"step": 266
},
{
"epoch": 2.136,
"grad_norm": 0.9493343830108643,
"learning_rate": 2.0645756677294788e-06,
"loss": 1.186,
"step": 267
},
{
"epoch": 2.144,
"grad_norm": 0.97528475522995,
"learning_rate": 2.029655747593169e-06,
"loss": 1.2699,
"step": 268
},
{
"epoch": 2.152,
"grad_norm": 0.914726972579956,
"learning_rate": 1.9949583072489455e-06,
"loss": 1.1699,
"step": 269
},
{
"epoch": 2.16,
"grad_norm": 0.9276081323623657,
"learning_rate": 1.9604859455468587e-06,
"loss": 1.2022,
"step": 270
},
{
"epoch": 2.168,
"grad_norm": 0.9139848947525024,
"learning_rate": 1.926241244478496e-06,
"loss": 1.1245,
"step": 271
},
{
"epoch": 2.176,
"grad_norm": 0.9088538885116577,
"learning_rate": 1.8922267689835806e-06,
"loss": 1.1375,
"step": 272
},
{
"epoch": 2.184,
"grad_norm": 0.9474396705627441,
"learning_rate": 1.8584450667578656e-06,
"loss": 1.178,
"step": 273
},
{
"epoch": 2.192,
"grad_norm": 0.9475589394569397,
"learning_rate": 1.8248986680623077e-06,
"loss": 1.1776,
"step": 274
},
{
"epoch": 2.2,
"grad_norm": 0.9256978034973145,
"learning_rate": 1.7915900855335506e-06,
"loss": 1.0931,
"step": 275
},
{
"epoch": 2.208,
"grad_norm": 0.9370779991149902,
"learning_rate": 1.7585218139957205e-06,
"loss": 1.1597,
"step": 276
},
{
"epoch": 2.216,
"grad_norm": 1.0122766494750977,
"learning_rate": 1.7256963302735752e-06,
"loss": 1.2446,
"step": 277
},
{
"epoch": 2.224,
"grad_norm": 0.9397243857383728,
"learning_rate": 1.6931160930069789e-06,
"loss": 1.202,
"step": 278
},
{
"epoch": 2.232,
"grad_norm": 0.9453902840614319,
"learning_rate": 1.6607835424667578e-06,
"loss": 1.1506,
"step": 279
},
{
"epoch": 2.24,
"grad_norm": 0.9595220685005188,
"learning_rate": 1.6287011003719105e-06,
"loss": 1.093,
"step": 280
},
{
"epoch": 2.248,
"grad_norm": 0.9128125905990601,
"learning_rate": 1.596871169708235e-06,
"loss": 1.2059,
"step": 281
},
{
"epoch": 2.2560000000000002,
"grad_norm": 0.9542051553726196,
"learning_rate": 1.5652961345483353e-06,
"loss": 1.1799,
"step": 282
},
{
"epoch": 2.2640000000000002,
"grad_norm": 0.9437090754508972,
"learning_rate": 1.5339783598730568e-06,
"loss": 1.1929,
"step": 283
},
{
"epoch": 2.2720000000000002,
"grad_norm": 0.9622453451156616,
"learning_rate": 1.5029201913943425e-06,
"loss": 1.2047,
"step": 284
},
{
"epoch": 2.2800000000000002,
"grad_norm": 0.9787655472755432,
"learning_rate": 1.4721239553795485e-06,
"loss": 1.2801,
"step": 285
},
{
"epoch": 2.288,
"grad_norm": 0.9507718682289124,
"learning_rate": 1.4415919584771999e-06,
"loss": 1.1508,
"step": 286
},
{
"epoch": 2.296,
"grad_norm": 0.9262567162513733,
"learning_rate": 1.4113264875442201e-06,
"loss": 1.171,
"step": 287
},
{
"epoch": 2.304,
"grad_norm": 0.9207803606987,
"learning_rate": 1.3813298094746491e-06,
"loss": 1.0647,
"step": 288
},
{
"epoch": 2.312,
"grad_norm": 0.9652720093727112,
"learning_rate": 1.35160417102985e-06,
"loss": 1.21,
"step": 289
},
{
"epoch": 2.32,
"grad_norm": 0.9531212449073792,
"learning_rate": 1.3221517986702249e-06,
"loss": 1.3148,
"step": 290
},
{
"epoch": 2.328,
"grad_norm": 0.9690611362457275,
"learning_rate": 1.292974898388456e-06,
"loss": 1.133,
"step": 291
},
{
"epoch": 2.336,
"grad_norm": 0.933113157749176,
"learning_rate": 1.2640756555442684e-06,
"loss": 1.1668,
"step": 292
},
{
"epoch": 2.344,
"grad_norm": 0.903457522392273,
"learning_rate": 1.235456234700756e-06,
"loss": 1.1914,
"step": 293
},
{
"epoch": 2.352,
"grad_norm": 0.9584729075431824,
"learning_rate": 1.207118779462248e-06,
"loss": 1.1777,
"step": 294
},
{
"epoch": 2.36,
"grad_norm": 1.0123586654663086,
"learning_rate": 1.1790654123137552e-06,
"loss": 1.1946,
"step": 295
},
{
"epoch": 2.368,
"grad_norm": 1.0037939548492432,
"learning_rate": 1.1512982344619904e-06,
"loss": 1.2927,
"step": 296
},
{
"epoch": 2.376,
"grad_norm": 0.9953064322471619,
"learning_rate": 1.1238193256779955e-06,
"loss": 1.3264,
"step": 297
},
{
"epoch": 2.384,
"grad_norm": 1.0182257890701294,
"learning_rate": 1.0966307441413598e-06,
"loss": 1.1768,
"step": 298
},
{
"epoch": 2.392,
"grad_norm": 0.9684757590293884,
"learning_rate": 1.0697345262860638e-06,
"loss": 1.1419,
"step": 299
},
{
"epoch": 2.4,
"grad_norm": 0.9765399098396301,
"learning_rate": 1.0431326866479457e-06,
"loss": 1.1746,
"step": 300
},
{
"epoch": 2.408,
"grad_norm": 0.9365405440330505,
"learning_rate": 1.01682721771382e-06,
"loss": 1.2397,
"step": 301
},
{
"epoch": 2.416,
"grad_norm": 0.9802318811416626,
"learning_rate": 9.908200897722332e-07,
"loss": 1.1136,
"step": 302
},
{
"epoch": 2.424,
"grad_norm": 1.0139752626419067,
"learning_rate": 9.6511325076589e-07,
"loss": 1.2358,
"step": 303
},
{
"epoch": 2.432,
"grad_norm": 0.9535879492759705,
"learning_rate": 9.397086261457511e-07,
"loss": 1.1577,
"step": 304
},
{
"epoch": 2.44,
"grad_norm": 0.9084758162498474,
"learning_rate": 9.146081187268185e-07,
"loss": 1.1313,
"step": 305
},
{
"epoch": 2.448,
"grad_norm": 1.0524094104766846,
"learning_rate": 8.898136085456127e-07,
"loss": 1.2469,
"step": 306
},
{
"epoch": 2.456,
"grad_norm": 0.9283274412155151,
"learning_rate": 8.65326952719357e-07,
"loss": 1.1897,
"step": 307
},
{
"epoch": 2.464,
"grad_norm": 0.9858782291412354,
"learning_rate": 8.411499853068783e-07,
"loss": 1.2205,
"step": 308
},
{
"epoch": 2.472,
"grad_norm": 0.961403489112854,
"learning_rate": 8.172845171712379e-07,
"loss": 1.1335,
"step": 309
},
{
"epoch": 2.48,
"grad_norm": 0.9039857387542725,
"learning_rate": 7.937323358440935e-07,
"loss": 1.1261,
"step": 310
},
{
"epoch": 2.488,
"grad_norm": 0.8671655654907227,
"learning_rate": 7.70495205391818e-07,
"loss": 1.1149,
"step": 311
},
{
"epoch": 2.496,
"grad_norm": 0.9556377530097961,
"learning_rate": 7.475748662833615e-07,
"loss": 1.2633,
"step": 312
},
{
"epoch": 2.504,
"grad_norm": 0.9426801204681396,
"learning_rate": 7.249730352599e-07,
"loss": 1.2781,
"step": 313
},
{
"epoch": 2.512,
"grad_norm": 0.9554891586303711,
"learning_rate": 7.026914052062433e-07,
"loss": 1.1741,
"step": 314
},
{
"epoch": 2.52,
"grad_norm": 0.948947012424469,
"learning_rate": 6.807316450240425e-07,
"loss": 1.183,
"step": 315
},
{
"epoch": 2.528,
"grad_norm": 0.9793767929077148,
"learning_rate": 6.590953995067812e-07,
"loss": 1.2157,
"step": 316
},
{
"epoch": 2.536,
"grad_norm": 0.9236152172088623,
"learning_rate": 6.377842892165892e-07,
"loss": 1.1956,
"step": 317
},
{
"epoch": 2.544,
"grad_norm": 0.89536452293396,
"learning_rate": 6.167999103628569e-07,
"loss": 1.1124,
"step": 318
},
{
"epoch": 2.552,
"grad_norm": 0.8991439938545227,
"learning_rate": 5.961438346826792e-07,
"loss": 1.2367,
"step": 319
},
{
"epoch": 2.56,
"grad_norm": 0.9654480814933777,
"learning_rate": 5.758176093231294e-07,
"loss": 1.1769,
"step": 320
},
{
"epoch": 2.568,
"grad_norm": 0.9762253165245056,
"learning_rate": 5.558227567253832e-07,
"loss": 1.1362,
"step": 321
},
{
"epoch": 2.576,
"grad_norm": 0.9315025210380554,
"learning_rate": 5.361607745106817e-07,
"loss": 1.0455,
"step": 322
},
{
"epoch": 2.584,
"grad_norm": 1.0394562482833862,
"learning_rate": 5.168331353681643e-07,
"loss": 1.2309,
"step": 323
},
{
"epoch": 2.592,
"grad_norm": 0.9258297681808472,
"learning_rate": 4.97841286944557e-07,
"loss": 1.1187,
"step": 324
},
{
"epoch": 2.6,
"grad_norm": 0.9597828388214111,
"learning_rate": 4.791866517357491e-07,
"loss": 1.2398,
"step": 325
},
{
"epoch": 2.608,
"grad_norm": 0.9284036159515381,
"learning_rate": 4.608706269802471e-07,
"loss": 1.1547,
"step": 326
},
{
"epoch": 2.616,
"grad_norm": 1.011851191520691,
"learning_rate": 4.428945845545168e-07,
"loss": 1.2571,
"step": 327
},
{
"epoch": 2.624,
"grad_norm": 0.9457777142524719,
"learning_rate": 4.2525987087023433e-07,
"loss": 1.2648,
"step": 328
},
{
"epoch": 2.632,
"grad_norm": 0.9738786220550537,
"learning_rate": 4.0796780677343606e-07,
"loss": 1.211,
"step": 329
},
{
"epoch": 2.64,
"grad_norm": 0.9281070232391357,
"learning_rate": 3.910196874455896e-07,
"loss": 1.1852,
"step": 330
},
{
"epoch": 2.648,
"grad_norm": 0.9425092339515686,
"learning_rate": 3.744167823065814e-07,
"loss": 1.2286,
"step": 331
},
{
"epoch": 2.656,
"grad_norm": 0.9291054606437683,
"learning_rate": 3.581603349196372e-07,
"loss": 1.1621,
"step": 332
},
{
"epoch": 2.664,
"grad_norm": 0.92279052734375,
"learning_rate": 3.4225156289818096e-07,
"loss": 1.2425,
"step": 333
},
{
"epoch": 2.672,
"grad_norm": 0.9327970743179321,
"learning_rate": 3.26691657814634e-07,
"loss": 1.1193,
"step": 334
},
{
"epoch": 2.68,
"grad_norm": 0.9604306817054749,
"learning_rate": 3.1148178511116624e-07,
"loss": 1.1053,
"step": 335
},
{
"epoch": 2.6879999999999997,
"grad_norm": 1.009303331375122,
"learning_rate": 2.966230840124007e-07,
"loss": 1.1874,
"step": 336
},
{
"epoch": 2.6959999999999997,
"grad_norm": 0.9507207274436951,
"learning_rate": 2.821166674400905e-07,
"loss": 1.1105,
"step": 337
},
{
"epoch": 2.7039999999999997,
"grad_norm": 0.9599238634109497,
"learning_rate": 2.6796362192975766e-07,
"loss": 1.1718,
"step": 338
},
{
"epoch": 2.7119999999999997,
"grad_norm": 0.8968420624732971,
"learning_rate": 2.5416500754931294e-07,
"loss": 1.0511,
"step": 339
},
{
"epoch": 2.7199999999999998,
"grad_norm": 0.9986500144004822,
"learning_rate": 2.407218578196524e-07,
"loss": 1.2698,
"step": 340
},
{
"epoch": 2.7279999999999998,
"grad_norm": 0.9403897523880005,
"learning_rate": 2.2763517963725169e-07,
"loss": 1.2182,
"step": 341
},
{
"epoch": 2.7359999999999998,
"grad_norm": 0.9974818825721741,
"learning_rate": 2.1490595319874574e-07,
"loss": 1.2383,
"step": 342
},
{
"epoch": 2.7439999999999998,
"grad_norm": 1.008009910583496,
"learning_rate": 2.0253513192751374e-07,
"loss": 1.1606,
"step": 343
},
{
"epoch": 2.752,
"grad_norm": 1.019026279449463,
"learning_rate": 1.905236424022633e-07,
"loss": 1.1782,
"step": 344
},
{
"epoch": 2.76,
"grad_norm": 0.9780669808387756,
"learning_rate": 1.7887238428763553e-07,
"loss": 1.1847,
"step": 345
},
{
"epoch": 2.768,
"grad_norm": 1.0105910301208496,
"learning_rate": 1.6758223026681507e-07,
"loss": 1.2057,
"step": 346
},
{
"epoch": 2.776,
"grad_norm": 0.934592068195343,
"learning_rate": 1.5665402597616842e-07,
"loss": 1.0974,
"step": 347
},
{
"epoch": 2.784,
"grad_norm": 0.9480429887771606,
"learning_rate": 1.4608858994190344e-07,
"loss": 1.169,
"step": 348
},
{
"epoch": 2.792,
"grad_norm": 0.8923419713973999,
"learning_rate": 1.358867135187636e-07,
"loss": 1.1029,
"step": 349
},
{
"epoch": 2.8,
"grad_norm": 1.0625927448272705,
"learning_rate": 1.2604916083075236e-07,
"loss": 1.1654,
"step": 350
},
{
"epoch": 2.808,
"grad_norm": 0.9121294617652893,
"learning_rate": 1.1657666871390471e-07,
"loss": 1.0817,
"step": 351
},
{
"epoch": 2.816,
"grad_norm": 1.002794861793518,
"learning_rate": 1.0746994666109234e-07,
"loss": 1.3023,
"step": 352
},
{
"epoch": 2.824,
"grad_norm": 0.9711022973060608,
"learning_rate": 9.872967676888611e-08,
"loss": 1.1863,
"step": 353
},
{
"epoch": 2.832,
"grad_norm": 0.9451983571052551,
"learning_rate": 9.035651368646647e-08,
"loss": 1.1599,
"step": 354
},
{
"epoch": 2.84,
"grad_norm": 0.9576354026794434,
"learning_rate": 8.235108456658814e-08,
"loss": 1.2885,
"step": 355
},
{
"epoch": 2.848,
"grad_norm": 0.9445213675498962,
"learning_rate": 7.471398901860772e-08,
"loss": 1.2252,
"step": 356
},
{
"epoch": 2.856,
"grad_norm": 1.0621612071990967,
"learning_rate": 6.744579906357185e-08,
"loss": 1.2234,
"step": 357
},
{
"epoch": 2.864,
"grad_norm": 0.920613169670105,
"learning_rate": 6.054705909137426e-08,
"loss": 1.1826,
"step": 358
},
{
"epoch": 2.872,
"grad_norm": 1.0022947788238525,
"learning_rate": 5.401828581997948e-08,
"loss": 1.1835,
"step": 359
},
{
"epoch": 2.88,
"grad_norm": 0.9064760208129883,
"learning_rate": 4.7859968256719344e-08,
"loss": 1.0867,
"step": 360
},
{
"epoch": 2.888,
"grad_norm": 0.9370819926261902,
"learning_rate": 4.207256766166845e-08,
"loss": 1.1871,
"step": 361
},
{
"epoch": 2.896,
"grad_norm": 1.0383437871932983,
"learning_rate": 3.665651751309451e-08,
"loss": 1.2517,
"step": 362
},
{
"epoch": 2.904,
"grad_norm": 0.962099552154541,
"learning_rate": 3.16122234749916e-08,
"loss": 1.2392,
"step": 363
},
{
"epoch": 2.912,
"grad_norm": 0.9519680142402649,
"learning_rate": 2.6940063366693303e-08,
"loss": 1.0573,
"step": 364
},
{
"epoch": 2.92,
"grad_norm": 0.9429070353507996,
"learning_rate": 2.264038713457706e-08,
"loss": 1.1827,
"step": 365
},
{
"epoch": 2.928,
"grad_norm": 0.9092634916305542,
"learning_rate": 1.8713516825851207e-08,
"loss": 1.1677,
"step": 366
},
{
"epoch": 2.936,
"grad_norm": 0.9508703351020813,
"learning_rate": 1.51597465644332e-08,
"loss": 1.15,
"step": 367
},
{
"epoch": 2.944,
"grad_norm": 0.9615942239761353,
"learning_rate": 1.1979342528922189e-08,
"loss": 1.067,
"step": 368
},
{
"epoch": 2.952,
"grad_norm": 0.9065539240837097,
"learning_rate": 9.1725429326589e-09,
"loss": 1.1662,
"step": 369
},
{
"epoch": 2.96,
"grad_norm": 0.9946824312210083,
"learning_rate": 6.739558005884883e-09,
"loss": 1.2158,
"step": 370
},
{
"epoch": 2.968,
"grad_norm": 0.9945969581604004,
"learning_rate": 4.6805699799967744e-09,
"loss": 1.2243,
"step": 371
},
{
"epoch": 2.976,
"grad_norm": 1.0351194143295288,
"learning_rate": 2.995733073895557e-09,
"loss": 1.2542,
"step": 372
},
{
"epoch": 2.984,
"grad_norm": 0.9449812769889832,
"learning_rate": 1.6851734824380184e-09,
"loss": 1.1568,
"step": 373
},
{
"epoch": 2.992,
"grad_norm": 0.9165475368499756,
"learning_rate": 7.48989366980979e-10,
"loss": 1.1133,
"step": 374
},
{
"epoch": 3.0,
"grad_norm": 1.0071150064468384,
"learning_rate": 1.872508480332824e-10,
"loss": 1.1486,
"step": 375
},
{
"epoch": 3.0,
"step": 375,
"total_flos": 62777166004224.0,
"train_loss": 1.4248257904052735,
"train_runtime": 9087.8291,
"train_samples_per_second": 1.32,
"train_steps_per_second": 0.041
}
],
"logging_steps": 1,
"max_steps": 375,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 62777166004224.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}
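
For reference, the `log_history` above can be consumed directly with Python's standard `json` module. The sketch below is a minimal example and not part of the checkpoint: it loads the file, pulls out the step/loss/learning-rate triples, and reproduces the learning-rate curve that the logged values appear to follow, namely linear warmup to the 1e-05 peak over the first 12 optimizer steps and cosine decay over the remaining steps up to `max_steps` = 375. The file path is a placeholder, and the 12-step warmup is inferred from the logged values rather than taken from any stated training config.

```python
import json
import math

# Placeholder path: point this at the trainer_state.json from the checkpoint folder.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step log entries (the final summary entry has no "loss" key).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
logged_lrs = [e["learning_rate"] for e in entries]


def scheduled_lr(step, peak=1e-5, warmup=12, total=375):
    """Learning rate in effect at a 1-indexed logged step.

    Assumes linear warmup to `peak` over `warmup` optimizer steps followed by
    cosine decay, which is what the logged values appear to follow; the warmup
    length is inferred from the data, not read from a config.
    """
    current = step - 1  # 0-indexed optimizer step, as in transformers' schedulers
    if current < warmup:
        return peak * current / warmup
    progress = (current - warmup) / (total - warmup)
    return 0.5 * peak * (1.0 + math.cos(math.pi * progress))


# Spot-check the reconstruction against a few logged values.
for s in (1, 13, 14, 200, 375):
    idx = steps.index(s)
    print(f"step {s:3d}  logged lr {logged_lrs[idx]:.6e}  reconstructed {scheduled_lr(s):.6e}")

print(f"mean logged training loss: {sum(losses) / len(losses):.4f}")
```

Running this against the data above reproduces the logged learning rates to floating-point precision, which is consistent with a cosine-with-warmup schedule (e.g. `transformers`' `get_cosine_schedule_with_warmup`), though the actual scheduler used is not recorded in this file.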