{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1347,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0022271714922048997,
      "grad_norm": 57.42307920658086,
      "learning_rate": 0.0,
      "loss": 11.1055,
      "step": 1
    },
    {
      "epoch": 0.004454342984409799,
      "grad_norm": 55.4142581004584,
      "learning_rate": 3.703703703703704e-07,
      "loss": 11.1441,
      "step": 2
    },
    {
      "epoch": 0.0066815144766146995,
      "grad_norm": 56.04170034902961,
      "learning_rate": 7.407407407407408e-07,
      "loss": 11.1293,
      "step": 3
    },
    {
      "epoch": 0.008908685968819599,
      "grad_norm": 54.95758433278382,
      "learning_rate": 1.1111111111111112e-06,
      "loss": 11.0293,
      "step": 4
    },
    {
      "epoch": 0.011135857461024499,
      "grad_norm": 57.029873140763165,
      "learning_rate": 1.4814814814814817e-06,
      "loss": 11.1025,
      "step": 5
    },
    {
      "epoch": 0.013363028953229399,
      "grad_norm": 58.88736752344377,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 10.9889,
      "step": 6
    },
    {
      "epoch": 0.015590200445434299,
      "grad_norm": 64.03379235623899,
      "learning_rate": 2.2222222222222225e-06,
      "loss": 10.6547,
      "step": 7
    },
    {
      "epoch": 0.017817371937639197,
      "grad_norm": 87.78214617261006,
      "learning_rate": 2.5925925925925925e-06,
      "loss": 9.6951,
      "step": 8
    },
    {
      "epoch": 0.0200445434298441,
      "grad_norm": 99.69188628493322,
      "learning_rate": 2.9629629629629633e-06,
      "loss": 9.124,
      "step": 9
    },
    {
      "epoch": 0.022271714922048998,
      "grad_norm": 111.96002267549564,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 8.5551,
      "step": 10
    },
    {
      "epoch": 0.024498886414253896,
      "grad_norm": 63.81628779520451,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 3.6613,
      "step": 11
    },
    {
      "epoch": 0.026726057906458798,
      "grad_norm": 54.32876659142105,
      "learning_rate": 4.074074074074075e-06,
      "loss": 3.2366,
      "step": 12
    },
    {
      "epoch": 0.028953229398663696,
      "grad_norm": 36.65458024792978,
      "learning_rate": 4.444444444444445e-06,
      "loss": 2.5217,
      "step": 13
    },
    {
      "epoch": 0.031180400890868598,
      "grad_norm": 30.57221885493666,
      "learning_rate": 4.814814814814815e-06,
      "loss": 2.317,
      "step": 14
    },
    {
      "epoch": 0.0334075723830735,
      "grad_norm": 6.25280699614946,
      "learning_rate": 5.185185185185185e-06,
      "loss": 1.3326,
      "step": 15
    },
    {
      "epoch": 0.035634743875278395,
      "grad_norm": 4.862666131069493,
      "learning_rate": 5.555555555555556e-06,
      "loss": 1.2688,
      "step": 16
    },
    {
      "epoch": 0.0378619153674833,
      "grad_norm": 3.488769157504774,
      "learning_rate": 5.925925925925927e-06,
      "loss": 1.1812,
      "step": 17
    },
    {
      "epoch": 0.0400890868596882,
      "grad_norm": 2.8021514727775503,
      "learning_rate": 6.296296296296296e-06,
      "loss": 1.1316,
      "step": 18
    },
    {
      "epoch": 0.042316258351893093,
      "grad_norm": 2.199388422339967,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.0683,
      "step": 19
    },
    {
      "epoch": 0.044543429844097995,
      "grad_norm": 5.525393881777902,
      "learning_rate": 7.0370370370370375e-06,
      "loss": 0.9047,
      "step": 20
    },
    {
      "epoch": 0.0467706013363029,
      "grad_norm": 11.02138041002174,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.8999,
      "step": 21
    },
    {
      "epoch": 0.04899777282850779,
      "grad_norm": 1.5453174995875891,
      "learning_rate": 7.777777777777777e-06,
      "loss": 0.9263,
      "step": 22
    },
    {
      "epoch": 0.051224944320712694,
      "grad_norm": 1.1422921936760477,
      "learning_rate": 8.14814814814815e-06,
      "loss": 0.8617,
      "step": 23
    },
    {
      "epoch": 0.053452115812917596,
      "grad_norm": 0.950098543532029,
      "learning_rate": 8.518518518518519e-06,
      "loss": 0.8579,
      "step": 24
    },
    {
      "epoch": 0.0556792873051225,
      "grad_norm": 0.8460261969278372,
      "learning_rate": 8.88888888888889e-06,
      "loss": 0.8283,
      "step": 25
    },
    {
      "epoch": 0.05790645879732739,
      "grad_norm": 0.7528175819870679,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.8063,
      "step": 26
    },
    {
      "epoch": 0.060133630289532294,
      "grad_norm": 0.7930777664338834,
      "learning_rate": 9.62962962962963e-06,
      "loss": 0.7268,
      "step": 27
    },
    {
      "epoch": 0.062360801781737196,
      "grad_norm": 0.7913200183157787,
      "learning_rate": 1e-05,
      "loss": 0.7574,
      "step": 28
    },
    {
      "epoch": 0.0645879732739421,
      "grad_norm": 0.6627676019174363,
      "learning_rate": 1.037037037037037e-05,
      "loss": 0.7387,
      "step": 29
    },
    {
      "epoch": 0.066815144766147,
      "grad_norm": 0.6189513082849771,
      "learning_rate": 1.074074074074074e-05,
      "loss": 0.719,
      "step": 30
    },
    {
      "epoch": 0.06904231625835189,
      "grad_norm": 0.6392565046924722,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.6966,
      "step": 31
    },
    {
      "epoch": 0.07126948775055679,
      "grad_norm": 0.7613554413263333,
      "learning_rate": 1.1481481481481482e-05,
      "loss": 0.6991,
      "step": 32
    },
    {
      "epoch": 0.07349665924276169,
      "grad_norm": 0.8495800994596282,
      "learning_rate": 1.1851851851851853e-05,
      "loss": 0.6658,
      "step": 33
    },
    {
      "epoch": 0.0757238307349666,
      "grad_norm": 0.45250299215013506,
      "learning_rate": 1.2222222222222222e-05,
      "loss": 0.6828,
      "step": 34
    },
    {
      "epoch": 0.0779510022271715,
      "grad_norm": 0.47839604371584693,
      "learning_rate": 1.2592592592592592e-05,
      "loss": 0.6472,
      "step": 35
    },
    {
      "epoch": 0.0801781737193764,
      "grad_norm": 0.5706925261506277,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.6527,
      "step": 36
    },
    {
      "epoch": 0.08240534521158129,
      "grad_norm": 0.5490722862903289,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6394,
      "step": 37
    },
    {
      "epoch": 0.08463251670378619,
      "grad_norm": 0.3997578606482899,
      "learning_rate": 1.3703703703703704e-05,
      "loss": 0.6088,
      "step": 38
    },
    {
      "epoch": 0.08685968819599109,
      "grad_norm": 0.3888538634363526,
      "learning_rate": 1.4074074074074075e-05,
      "loss": 0.6347,
      "step": 39
    },
    {
      "epoch": 0.08908685968819599,
      "grad_norm": 0.4842890885503436,
      "learning_rate": 1.4444444444444444e-05,
      "loss": 0.6166,
      "step": 40
    },
    {
      "epoch": 0.09131403118040089,
      "grad_norm": 0.518705372079969,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.62,
      "step": 41
    },
    {
      "epoch": 0.0935412026726058,
      "grad_norm": 0.35931037655330894,
      "learning_rate": 1.5185185185185186e-05,
      "loss": 0.6058,
      "step": 42
    },
    {
      "epoch": 0.0957683741648107,
      "grad_norm": 0.33027312330089215,
      "learning_rate": 1.5555555555555555e-05,
      "loss": 0.6302,
      "step": 43
    },
    {
      "epoch": 0.09799554565701558,
      "grad_norm": 0.36257798617418846,
      "learning_rate": 1.5925925925925926e-05,
      "loss": 0.6142,
      "step": 44
    },
    {
      "epoch": 0.10022271714922049,
      "grad_norm": 0.37194278361077726,
      "learning_rate": 1.62962962962963e-05,
      "loss": 0.5843,
      "step": 45
    },
    {
      "epoch": 0.10244988864142539,
      "grad_norm": 0.3371545105094365,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.5784,
      "step": 46
    },
    {
      "epoch": 0.10467706013363029,
      "grad_norm": 0.3297761659143661,
      "learning_rate": 1.7037037037037038e-05,
      "loss": 0.6282,
      "step": 47
    },
    {
      "epoch": 0.10690423162583519,
      "grad_norm": 0.2835121491028332,
      "learning_rate": 1.740740740740741e-05,
      "loss": 0.5727,
      "step": 48
    },
    {
      "epoch": 0.1091314031180401,
      "grad_norm": 0.36396162705888246,
      "learning_rate": 1.777777777777778e-05,
      "loss": 0.5801,
      "step": 49
    },
    {
      "epoch": 0.111358574610245,
      "grad_norm": 0.3485481215402162,
      "learning_rate": 1.814814814814815e-05,
      "loss": 0.5802,
      "step": 50
    },
    {
      "epoch": 0.11358574610244988,
      "grad_norm": 0.2901852671276199,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.572,
      "step": 51
    },
    {
      "epoch": 0.11581291759465479,
      "grad_norm": 0.33239940938527635,
      "learning_rate": 1.888888888888889e-05,
      "loss": 0.5871,
      "step": 52
    },
    {
      "epoch": 0.11804008908685969,
      "grad_norm": 0.32272368620931075,
      "learning_rate": 1.925925925925926e-05,
      "loss": 0.5893,
      "step": 53
    },
    {
      "epoch": 0.12026726057906459,
      "grad_norm": 0.310668770698013,
      "learning_rate": 1.962962962962963e-05,
      "loss": 0.5813,
      "step": 54
    },
    {
      "epoch": 0.12249443207126949,
      "grad_norm": 0.2862388442261126,
      "learning_rate": 2e-05,
      "loss": 0.5555,
      "step": 55
    },
    {
      "epoch": 0.12472160356347439,
      "grad_norm": 0.28836336316118216,
      "learning_rate": 2.037037037037037e-05,
      "loss": 0.5573,
      "step": 56
    },
    {
      "epoch": 0.12694877505567928,
      "grad_norm": 0.3343688470290457,
      "learning_rate": 2.074074074074074e-05,
      "loss": 0.5607,
      "step": 57
    },
    {
      "epoch": 0.1291759465478842,
      "grad_norm": 0.27729103785852094,
      "learning_rate": 2.111111111111111e-05,
      "loss": 0.5479,
      "step": 58
    },
    {
      "epoch": 0.13140311804008908,
      "grad_norm": 0.3278121714571836,
      "learning_rate": 2.148148148148148e-05,
      "loss": 0.5583,
      "step": 59
    },
    {
      "epoch": 0.133630289532294,
      "grad_norm": 0.25748897821127487,
      "learning_rate": 2.1851851851851852e-05,
      "loss": 0.5662,
      "step": 60
    },
    {
      "epoch": 0.1358574610244989,
      "grad_norm": 0.26680458322672634,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.531,
      "step": 61
    },
    {
      "epoch": 0.13808463251670378,
      "grad_norm": 0.2712193652049242,
      "learning_rate": 2.2592592592592594e-05,
      "loss": 0.5297,
      "step": 62
    },
    {
      "epoch": 0.1403118040089087,
      "grad_norm": 0.2549335734827084,
      "learning_rate": 2.2962962962962965e-05,
      "loss": 0.5468,
      "step": 63
    },
    {
      "epoch": 0.14253897550111358,
      "grad_norm": 0.28875698353844087,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 0.5219,
      "step": 64
    },
    {
      "epoch": 0.1447661469933185,
      "grad_norm": 0.27391864481987854,
      "learning_rate": 2.3703703703703707e-05,
      "loss": 0.5204,
      "step": 65
    },
    {
      "epoch": 0.14699331848552338,
      "grad_norm": 0.2692725207257318,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 0.548,
      "step": 66
    },
    {
      "epoch": 0.1492204899777283,
      "grad_norm": 0.26312996576101033,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 0.5306,
      "step": 67
    },
    {
      "epoch": 0.1514476614699332,
      "grad_norm": 0.24673062381809363,
      "learning_rate": 2.4814814814814816e-05,
      "loss": 0.5293,
      "step": 68
    },
    {
      "epoch": 0.15367483296213807,
      "grad_norm": 0.2544114168493051,
      "learning_rate": 2.5185185185185183e-05,
      "loss": 0.5334,
      "step": 69
    },
    {
      "epoch": 0.155902004454343,
      "grad_norm": 0.268517709442818,
      "learning_rate": 2.5555555555555554e-05,
      "loss": 0.5563,
      "step": 70
    },
    {
      "epoch": 0.15812917594654788,
      "grad_norm": 0.2489177187052713,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.5362,
      "step": 71
    },
    {
      "epoch": 0.1603563474387528,
      "grad_norm": 0.2615863452691099,
      "learning_rate": 2.6296296296296296e-05,
      "loss": 0.5451,
      "step": 72
    },
    {
      "epoch": 0.16258351893095768,
      "grad_norm": 0.26501318885913844,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.5306,
      "step": 73
    },
    {
      "epoch": 0.16481069042316257,
      "grad_norm": 0.24947289257521604,
      "learning_rate": 2.7037037037037037e-05,
      "loss": 0.5208,
      "step": 74
    },
    {
      "epoch": 0.16703786191536749,
      "grad_norm": 0.28127140174684223,
      "learning_rate": 2.7407407407407408e-05,
      "loss": 0.5338,
      "step": 75
    },
    {
      "epoch": 0.16926503340757237,
      "grad_norm": 0.26849164318476576,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.5386,
      "step": 76
    },
    {
      "epoch": 0.1714922048997773,
      "grad_norm": 0.263834539821763,
      "learning_rate": 2.814814814814815e-05,
      "loss": 0.5436,
      "step": 77
    },
    {
      "epoch": 0.17371937639198218,
      "grad_norm": 0.2566975308908174,
      "learning_rate": 2.851851851851852e-05,
      "loss": 0.5352,
      "step": 78
    },
    {
      "epoch": 0.1759465478841871,
      "grad_norm": 0.2993215207257922,
      "learning_rate": 2.8888888888888888e-05,
      "loss": 0.5334,
      "step": 79
    },
    {
      "epoch": 0.17817371937639198,
      "grad_norm": 0.2649930701553602,
      "learning_rate": 2.925925925925926e-05,
      "loss": 0.529,
      "step": 80
    },
    {
      "epoch": 0.18040089086859687,
      "grad_norm": 0.2499755228378194,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.5129,
      "step": 81
    },
    {
      "epoch": 0.18262806236080179,
      "grad_norm": 0.25585388447729907,
      "learning_rate": 3e-05,
      "loss": 0.5031,
      "step": 82
    },
    {
      "epoch": 0.18485523385300667,
      "grad_norm": 0.2804971309732976,
      "learning_rate": 3.037037037037037e-05,
      "loss": 0.52,
      "step": 83
    },
    {
      "epoch": 0.1870824053452116,
      "grad_norm": 0.30532069489713315,
      "learning_rate": 3.074074074074074e-05,
      "loss": 0.5113,
      "step": 84
    },
    {
      "epoch": 0.18930957683741648,
      "grad_norm": 0.31239584228708184,
      "learning_rate": 3.111111111111111e-05,
      "loss": 0.5046,
      "step": 85
    },
    {
      "epoch": 0.1915367483296214,
      "grad_norm": 0.29923465866612675,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.5171,
      "step": 86
    },
    {
      "epoch": 0.19376391982182628,
      "grad_norm": 0.3220264210471262,
      "learning_rate": 3.185185185185185e-05,
      "loss": 0.5306,
      "step": 87
    },
    {
      "epoch": 0.19599109131403117,
      "grad_norm": 0.3015679068134566,
      "learning_rate": 3.222222222222223e-05,
      "loss": 0.5029,
      "step": 88
    },
    {
      "epoch": 0.19821826280623608,
      "grad_norm": 0.2791608421023344,
      "learning_rate": 3.25925925925926e-05,
      "loss": 0.4814,
      "step": 89
    },
    {
      "epoch": 0.20044543429844097,
      "grad_norm": 0.2659375754124709,
      "learning_rate": 3.2962962962962964e-05,
      "loss": 0.5111,
      "step": 90
    },
    {
      "epoch": 0.2026726057906459,
      "grad_norm": 0.2997121598755476,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.4969,
      "step": 91
    },
    {
      "epoch": 0.20489977728285078,
      "grad_norm": 0.27277673860961665,
      "learning_rate": 3.3703703703703706e-05,
      "loss": 0.525,
      "step": 92
    },
    {
      "epoch": 0.2071269487750557,
      "grad_norm": 0.2694873726831575,
      "learning_rate": 3.4074074074074077e-05,
      "loss": 0.5277,
      "step": 93
    },
    {
      "epoch": 0.20935412026726058,
      "grad_norm": 0.3357690547348528,
      "learning_rate": 3.444444444444445e-05,
      "loss": 0.4768,
      "step": 94
    },
    {
      "epoch": 0.21158129175946547,
      "grad_norm": 0.3097028888192043,
      "learning_rate": 3.481481481481482e-05,
      "loss": 0.5327,
      "step": 95
    },
    {
      "epoch": 0.21380846325167038,
      "grad_norm": 0.274982135970598,
      "learning_rate": 3.518518518518519e-05,
      "loss": 0.5133,
      "step": 96
    },
    {
      "epoch": 0.21603563474387527,
      "grad_norm": 0.2689619091248766,
      "learning_rate": 3.555555555555556e-05,
      "loss": 0.5061,
      "step": 97
    },
    {
      "epoch": 0.2182628062360802,
      "grad_norm": 0.276310674407535,
      "learning_rate": 3.592592592592593e-05,
      "loss": 0.5023,
      "step": 98
    },
    {
      "epoch": 0.22048997772828507,
      "grad_norm": 0.2925539174112595,
      "learning_rate": 3.62962962962963e-05,
      "loss": 0.4902,
      "step": 99
    },
    {
      "epoch": 0.22271714922049,
      "grad_norm": 0.2764873755412426,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 0.4928,
      "step": 100
    },
    {
      "epoch": 0.22494432071269488,
      "grad_norm": 0.30596253382561084,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.4947,
      "step": 101
    },
    {
      "epoch": 0.22717149220489977,
      "grad_norm": 0.2507900196157514,
      "learning_rate": 3.740740740740741e-05,
      "loss": 0.4979,
      "step": 102
    },
    {
      "epoch": 0.22939866369710468,
      "grad_norm": 0.41155747976418094,
      "learning_rate": 3.777777777777778e-05,
      "loss": 0.5044,
      "step": 103
    },
    {
      "epoch": 0.23162583518930957,
      "grad_norm": 0.40004605557757594,
      "learning_rate": 3.814814814814815e-05,
      "loss": 0.4914,
      "step": 104
    },
    {
      "epoch": 0.23385300668151449,
      "grad_norm": 0.3158821021617771,
      "learning_rate": 3.851851851851852e-05,
      "loss": 0.4949,
      "step": 105
    },
    {
      "epoch": 0.23608017817371937,
      "grad_norm": 0.43094171027762884,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.5046,
      "step": 106
    },
    {
      "epoch": 0.2383073496659243,
      "grad_norm": 0.3682504918929769,
      "learning_rate": 3.925925925925926e-05,
      "loss": 0.4602,
      "step": 107
    },
    {
      "epoch": 0.24053452115812918,
      "grad_norm": 0.33297596471036733,
      "learning_rate": 3.962962962962963e-05,
      "loss": 0.5138,
      "step": 108
    },
    {
      "epoch": 0.24276169265033407,
      "grad_norm": 0.3721638469385512,
      "learning_rate": 4e-05,
      "loss": 0.4816,
      "step": 109
    },
    {
      "epoch": 0.24498886414253898,
      "grad_norm": 0.3525132455331721,
      "learning_rate": 4.0370370370370374e-05,
      "loss": 0.491,
      "step": 110
    },
    {
      "epoch": 0.24721603563474387,
      "grad_norm": 0.3284661170025923,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.5086,
      "step": 111
    },
    {
      "epoch": 0.24944320712694878,
      "grad_norm": 0.4553024801346518,
      "learning_rate": 4.111111111111111e-05,
      "loss": 0.4967,
      "step": 112
    },
    {
      "epoch": 0.2516703786191537,
      "grad_norm": 0.36062775373958933,
      "learning_rate": 4.148148148148148e-05,
      "loss": 0.5107,
      "step": 113
    },
    {
      "epoch": 0.25389755011135856,
      "grad_norm": 0.3139139462586485,
      "learning_rate": 4.185185185185185e-05,
      "loss": 0.4931,
      "step": 114
    },
    {
      "epoch": 0.2561247216035635,
      "grad_norm": 0.43508729251982736,
      "learning_rate": 4.222222222222222e-05,
      "loss": 0.5057,
      "step": 115
    },
    {
      "epoch": 0.2583518930957684,
      "grad_norm": 0.37579492262839576,
      "learning_rate": 4.259259259259259e-05,
      "loss": 0.4827,
      "step": 116
    },
    {
      "epoch": 0.26057906458797325,
      "grad_norm": 0.3135964755412191,
      "learning_rate": 4.296296296296296e-05,
      "loss": 0.4951,
      "step": 117
    },
    {
      "epoch": 0.26280623608017817,
      "grad_norm": 0.5103971124839464,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 0.4883,
      "step": 118
    },
    {
      "epoch": 0.2650334075723831,
      "grad_norm": 0.3886694527089134,
      "learning_rate": 4.3703703703703705e-05,
      "loss": 0.459,
      "step": 119
    },
    {
      "epoch": 0.267260579064588,
      "grad_norm": 0.34873133836486525,
      "learning_rate": 4.4074074074074076e-05,
      "loss": 0.4935,
      "step": 120
    },
    {
      "epoch": 0.26948775055679286,
      "grad_norm": 0.6019576624584538,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.4868,
      "step": 121
    },
    {
      "epoch": 0.2717149220489978,
      "grad_norm": 0.36932194797825146,
      "learning_rate": 4.481481481481482e-05,
      "loss": 0.4784,
      "step": 122
    },
    {
      "epoch": 0.2739420935412027,
      "grad_norm": 0.3804288507510714,
      "learning_rate": 4.518518518518519e-05,
      "loss": 0.4774,
      "step": 123
    },
    {
      "epoch": 0.27616926503340755,
      "grad_norm": 0.450739644920063,
      "learning_rate": 4.555555555555556e-05,
      "loss": 0.4834,
      "step": 124
    },
    {
      "epoch": 0.27839643652561247,
      "grad_norm": 0.3166655802453127,
      "learning_rate": 4.592592592592593e-05,
      "loss": 0.5027,
      "step": 125
    },
    {
      "epoch": 0.2806236080178174,
      "grad_norm": 0.3568553819877089,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.4719,
      "step": 126
    },
    {
      "epoch": 0.2828507795100223,
      "grad_norm": 0.2879304124523538,
      "learning_rate": 4.666666666666667e-05,
      "loss": 0.4575,
      "step": 127
    },
    {
      "epoch": 0.28507795100222716,
      "grad_norm": 0.3859580233017386,
      "learning_rate": 4.703703703703704e-05,
      "loss": 0.4751,
      "step": 128
    },
    {
      "epoch": 0.2873051224944321,
      "grad_norm": 0.32691499386259953,
      "learning_rate": 4.740740740740741e-05,
      "loss": 0.456,
      "step": 129
    },
    {
      "epoch": 0.289532293986637,
      "grad_norm": 0.32665326828281926,
      "learning_rate": 4.7777777777777784e-05,
      "loss": 0.4691,
      "step": 130
    },
    {
      "epoch": 0.29175946547884185,
      "grad_norm": 0.4003858496357345,
      "learning_rate": 4.814814814814815e-05,
      "loss": 0.5027,
      "step": 131
    },
    {
      "epoch": 0.29398663697104677,
      "grad_norm": 0.3287864746571255,
      "learning_rate": 4.851851851851852e-05,
      "loss": 0.4568,
      "step": 132
    },
    {
      "epoch": 0.2962138084632517,
      "grad_norm": 0.34638666355485653,
      "learning_rate": 4.888888888888889e-05,
      "loss": 0.4548,
      "step": 133
    },
    {
      "epoch": 0.2984409799554566,
      "grad_norm": 0.3736338755007743,
      "learning_rate": 4.925925925925926e-05,
      "loss": 0.4608,
      "step": 134
    },
    {
      "epoch": 0.30066815144766146,
      "grad_norm": 0.3606530799746982,
      "learning_rate": 4.962962962962963e-05,
      "loss": 0.4967,
      "step": 135
    },
    {
      "epoch": 0.3028953229398664,
      "grad_norm": 0.35828978658937044,
      "learning_rate": 5e-05,
      "loss": 0.4663,
      "step": 136
    },
    {
      "epoch": 0.3051224944320713,
      "grad_norm": 0.38886007461404093,
      "learning_rate": 4.995874587458746e-05,
      "loss": 0.4942,
      "step": 137
    },
    {
      "epoch": 0.30734966592427615,
      "grad_norm": 0.3492765115474014,
      "learning_rate": 4.991749174917492e-05,
      "loss": 0.502,
      "step": 138
    },
    {
      "epoch": 0.30957683741648107,
      "grad_norm": 0.3260537303402738,
      "learning_rate": 4.987623762376238e-05,
      "loss": 0.4809,
      "step": 139
    },
    {
      "epoch": 0.311804008908686,
      "grad_norm": 0.27678766098703955,
      "learning_rate": 4.9834983498349835e-05,
      "loss": 0.4662,
      "step": 140
    },
    {
      "epoch": 0.31403118040089084,
      "grad_norm": 0.32583906322889694,
      "learning_rate": 4.97937293729373e-05,
      "loss": 0.4744,
      "step": 141
    },
    {
      "epoch": 0.31625835189309576,
      "grad_norm": 0.30968501966537665,
      "learning_rate": 4.975247524752475e-05,
      "loss": 0.4693,
      "step": 142
    },
    {
      "epoch": 0.3184855233853007,
      "grad_norm": 0.300558723706453,
      "learning_rate": 4.971122112211221e-05,
      "loss": 0.4543,
      "step": 143
    },
    {
      "epoch": 0.3207126948775056,
      "grad_norm": 0.3590756342843173,
      "learning_rate": 4.9669966996699675e-05,
      "loss": 0.4848,
      "step": 144
    },
    {
      "epoch": 0.32293986636971045,
      "grad_norm": 0.3103656322051761,
      "learning_rate": 4.9628712871287133e-05,
      "loss": 0.481,
      "step": 145
    },
    {
      "epoch": 0.32516703786191536,
      "grad_norm": 0.36889845947563116,
      "learning_rate": 4.9587458745874585e-05,
      "loss": 0.4765,
      "step": 146
    },
    {
      "epoch": 0.3273942093541203,
      "grad_norm": 0.34252947893290014,
      "learning_rate": 4.954620462046205e-05,
      "loss": 0.4936,
      "step": 147
    },
    {
      "epoch": 0.32962138084632514,
      "grad_norm": 0.3848672223009835,
      "learning_rate": 4.950495049504951e-05,
      "loss": 0.4498,
      "step": 148
    },
    {
      "epoch": 0.33184855233853006,
      "grad_norm": 0.29661062901684304,
      "learning_rate": 4.9463696369636967e-05,
      "loss": 0.4878,
      "step": 149
    },
    {
      "epoch": 0.33407572383073497,
      "grad_norm": 0.3117169655533877,
      "learning_rate": 4.9422442244224425e-05,
      "loss": 0.4786,
      "step": 150
    },
    {
      "epoch": 0.3363028953229399,
      "grad_norm": 0.29938186320927185,
      "learning_rate": 4.938118811881188e-05,
      "loss": 0.484,
      "step": 151
    },
    {
      "epoch": 0.33853006681514475,
      "grad_norm": 0.3677147215204186,
      "learning_rate": 4.933993399339934e-05,
      "loss": 0.4943,
      "step": 152
    },
    {
      "epoch": 0.34075723830734966,
      "grad_norm": 0.31870028405815753,
      "learning_rate": 4.92986798679868e-05,
      "loss": 0.466,
      "step": 153
    },
    {
      "epoch": 0.3429844097995546,
      "grad_norm": 0.30485295371098536,
      "learning_rate": 4.925742574257426e-05,
      "loss": 0.4716,
      "step": 154
    },
    {
      "epoch": 0.34521158129175944,
      "grad_norm": 0.3204799751546297,
      "learning_rate": 4.9216171617161716e-05,
      "loss": 0.482,
      "step": 155
    },
    {
      "epoch": 0.34743875278396436,
      "grad_norm": 0.3495660839808905,
      "learning_rate": 4.917491749174918e-05,
      "loss": 0.4746,
      "step": 156
    },
    {
      "epoch": 0.34966592427616927,
      "grad_norm": 0.3758329591880699,
      "learning_rate": 4.913366336633663e-05,
      "loss": 0.5131,
      "step": 157
    },
    {
      "epoch": 0.3518930957683742,
      "grad_norm": 0.32055625709553975,
      "learning_rate": 4.909240924092409e-05,
      "loss": 0.4833,
      "step": 158
    },
    {
      "epoch": 0.35412026726057905,
      "grad_norm": 0.32097261525837784,
      "learning_rate": 4.9051155115511556e-05,
      "loss": 0.48,
      "step": 159
    },
    {
      "epoch": 0.35634743875278396,
      "grad_norm": 0.33480318109614066,
      "learning_rate": 4.9009900990099014e-05,
      "loss": 0.4551,
      "step": 160
    },
    {
      "epoch": 0.3585746102449889,
      "grad_norm": 0.2781227223952856,
      "learning_rate": 4.8968646864686466e-05,
      "loss": 0.47,
      "step": 161
    },
    {
      "epoch": 0.36080178173719374,
      "grad_norm": 0.38773722729695975,
      "learning_rate": 4.892739273927393e-05,
      "loss": 0.4873,
      "step": 162
    },
    {
      "epoch": 0.36302895322939865,
      "grad_norm": 0.27943792922418004,
      "learning_rate": 4.888613861386139e-05,
      "loss": 0.4584,
      "step": 163
    },
    {
      "epoch": 0.36525612472160357,
      "grad_norm": 0.30003973698376857,
      "learning_rate": 4.884488448844885e-05,
      "loss": 0.4636,
      "step": 164
    },
    {
      "epoch": 0.3674832962138085,
      "grad_norm": 0.3065918295334727,
      "learning_rate": 4.8803630363036305e-05,
      "loss": 0.4776,
      "step": 165
    },
    {
      "epoch": 0.36971046770601335,
      "grad_norm": 0.3201763919174801,
      "learning_rate": 4.8762376237623764e-05,
      "loss": 0.4961,
      "step": 166
    },
    {
      "epoch": 0.37193763919821826,
      "grad_norm": 0.3178300180539279,
      "learning_rate": 4.872112211221123e-05,
      "loss": 0.4759,
      "step": 167
    },
    {
      "epoch": 0.3741648106904232,
      "grad_norm": 0.29842427156720247,
      "learning_rate": 4.867986798679868e-05,
      "loss": 0.4743,
      "step": 168
    },
    {
      "epoch": 0.37639198218262804,
      "grad_norm": 0.32502426510303545,
      "learning_rate": 4.863861386138614e-05,
      "loss": 0.4669,
      "step": 169
    },
    {
      "epoch": 0.37861915367483295,
      "grad_norm": 0.29588998621776147,
      "learning_rate": 4.8597359735973603e-05,
      "loss": 0.4669,
      "step": 170
    },
    {
      "epoch": 0.38084632516703787,
      "grad_norm": 0.2899635288815167,
      "learning_rate": 4.855610561056106e-05,
      "loss": 0.4855,
      "step": 171
    },
    {
      "epoch": 0.3830734966592428,
      "grad_norm": 0.3064392168661253,
      "learning_rate": 4.851485148514851e-05,
      "loss": 0.4636,
      "step": 172
    },
    {
      "epoch": 0.38530066815144765,
      "grad_norm": 0.2986024540889945,
      "learning_rate": 4.847359735973598e-05,
      "loss": 0.4876,
      "step": 173
    },
    {
      "epoch": 0.38752783964365256,
      "grad_norm": 0.30233642625624657,
      "learning_rate": 4.8432343234323437e-05,
      "loss": 0.4731,
      "step": 174
    },
    {
      "epoch": 0.3897550111358575,
      "grad_norm": 0.27146933961382946,
      "learning_rate": 4.8391089108910895e-05,
      "loss": 0.4724,
      "step": 175
    },
    {
      "epoch": 0.39198218262806234,
      "grad_norm": 0.38926742917871604,
      "learning_rate": 4.834983498349835e-05,
      "loss": 0.4592,
      "step": 176
    },
    {
      "epoch": 0.39420935412026725,
      "grad_norm": 0.2918121435091572,
      "learning_rate": 4.830858085808581e-05,
      "loss": 0.478,
      "step": 177
    },
    {
      "epoch": 0.39643652561247217,
      "grad_norm": 0.2733312948862096,
      "learning_rate": 4.826732673267327e-05,
      "loss": 0.4326,
      "step": 178
    },
    {
      "epoch": 0.3986636971046771,
      "grad_norm": 0.28519888504000973,
      "learning_rate": 4.822607260726073e-05,
      "loss": 0.472,
      "step": 179
    },
    {
      "epoch": 0.40089086859688194,
      "grad_norm": 0.3046631075865034,
      "learning_rate": 4.8184818481848186e-05,
      "loss": 0.4724,
      "step": 180
    },
    {
      "epoch": 0.40311804008908686,
      "grad_norm": 0.28826960158466797,
      "learning_rate": 4.8143564356435644e-05,
      "loss": 0.4741,
      "step": 181
    },
    {
      "epoch": 0.4053452115812918,
      "grad_norm": 0.2701880021074098,
      "learning_rate": 4.810231023102311e-05,
      "loss": 0.4616,
      "step": 182
    },
    {
      "epoch": 0.40757238307349664,
      "grad_norm": 0.27599826258023064,
      "learning_rate": 4.806105610561056e-05,
      "loss": 0.4548,
      "step": 183
    },
    {
      "epoch": 0.40979955456570155,
      "grad_norm": 0.30476562089568104,
      "learning_rate": 4.801980198019802e-05,
      "loss": 0.4479,
      "step": 184
    },
    {
      "epoch": 0.41202672605790647,
      "grad_norm": 0.2790459906115023,
      "learning_rate": 4.7978547854785484e-05,
      "loss": 0.475,
      "step": 185
    },
    {
      "epoch": 0.4142538975501114,
      "grad_norm": 0.27612981777114276,
      "learning_rate": 4.793729372937294e-05,
      "loss": 0.4789,
      "step": 186
    },
    {
      "epoch": 0.41648106904231624,
      "grad_norm": 0.32980387723538307,
      "learning_rate": 4.7896039603960394e-05,
      "loss": 0.4677,
      "step": 187
    },
    {
      "epoch": 0.41870824053452116,
      "grad_norm": 0.29955756261599203,
      "learning_rate": 4.785478547854786e-05,
      "loss": 0.4815,
      "step": 188
    },
    {
      "epoch": 0.4209354120267261,
      "grad_norm": 0.35755084539372534,
      "learning_rate": 4.781353135313532e-05,
      "loss": 0.4582,
      "step": 189
    },
    {
      "epoch": 0.42316258351893093,
      "grad_norm": 0.31455989081013724,
      "learning_rate": 4.7772277227722775e-05,
      "loss": 0.4811,
      "step": 190
    },
    {
      "epoch": 0.42538975501113585,
      "grad_norm": 0.29333274730527464,
      "learning_rate": 4.7731023102310234e-05,
      "loss": 0.467,
      "step": 191
    },
    {
      "epoch": 0.42761692650334077,
      "grad_norm": 0.2877037349403691,
      "learning_rate": 4.768976897689769e-05,
      "loss": 0.459,
      "step": 192
    },
    {
      "epoch": 0.4298440979955457,
      "grad_norm": 0.2958273465058758,
      "learning_rate": 4.764851485148515e-05,
      "loss": 0.4998,
      "step": 193
    },
    {
      "epoch": 0.43207126948775054,
      "grad_norm": 0.2781440019522109,
      "learning_rate": 4.760726072607261e-05,
      "loss": 0.4439,
      "step": 194
    },
    {
      "epoch": 0.43429844097995546,
      "grad_norm": 0.283439750955963,
      "learning_rate": 4.756600660066007e-05,
      "loss": 0.4626,
      "step": 195
    },
    {
      "epoch": 0.4365256124721604,
      "grad_norm": 0.2911005370573716,
      "learning_rate": 4.7524752475247525e-05,
      "loss": 0.4625,
      "step": 196
    },
    {
      "epoch": 0.43875278396436523,
      "grad_norm": 0.25120199923795783,
      "learning_rate": 4.748349834983499e-05,
      "loss": 0.4488,
      "step": 197
    },
    {
      "epoch": 0.44097995545657015,
      "grad_norm": 0.2757619493421524,
      "learning_rate": 4.744224422442244e-05,
      "loss": 0.4513,
      "step": 198
    },
    {
      "epoch": 0.44320712694877507,
      "grad_norm": 0.3009536575674645,
      "learning_rate": 4.74009900990099e-05,
      "loss": 0.4663,
      "step": 199
    },
    {
      "epoch": 0.44543429844098,
      "grad_norm": 0.2672399691893589,
      "learning_rate": 4.7359735973597365e-05,
      "loss": 0.4563,
      "step": 200
    },
    {
      "epoch": 0.44766146993318484,
      "grad_norm": 0.31954353984499995,
      "learning_rate": 4.731848184818482e-05,
      "loss": 0.4651,
      "step": 201
    },
    {
      "epoch": 0.44988864142538976,
      "grad_norm": 0.2773874407552882,
      "learning_rate": 4.7277227722772274e-05,
      "loss": 0.4807,
      "step": 202
    },
    {
      "epoch": 0.4521158129175947,
      "grad_norm": 0.26492332012667835,
      "learning_rate": 4.723597359735974e-05,
      "loss": 0.4543,
      "step": 203
    },
    {
      "epoch": 0.45434298440979953,
      "grad_norm": 0.31184436207960836,
      "learning_rate": 4.71947194719472e-05,
      "loss": 0.4694,
      "step": 204
    },
    {
      "epoch": 0.45657015590200445,
      "grad_norm": 0.2731081790924368,
      "learning_rate": 4.7153465346534656e-05,
      "loss": 0.4572,
      "step": 205
    },
    {
      "epoch": 0.45879732739420936,
      "grad_norm": 0.2869232775138865,
      "learning_rate": 4.7112211221122114e-05,
      "loss": 0.4681,
      "step": 206
    },
    {
      "epoch": 0.4610244988864143,
      "grad_norm": 0.2639755248576992,
      "learning_rate": 4.707095709570957e-05,
      "loss": 0.4602,
      "step": 207
    },
    {
      "epoch": 0.46325167037861914,
      "grad_norm": 0.29189504155707585,
      "learning_rate": 4.702970297029703e-05,
      "loss": 0.4664,
      "step": 208
    },
    {
      "epoch": 0.46547884187082406,
      "grad_norm": 0.2601644937321229,
      "learning_rate": 4.698844884488449e-05,
      "loss": 0.4483,
      "step": 209
    },
    {
      "epoch": 0.46770601336302897,
      "grad_norm": 0.27106301913748243,
      "learning_rate": 4.694719471947195e-05,
      "loss": 0.4716,
      "step": 210
    },
    {
      "epoch": 0.46993318485523383,
      "grad_norm": 0.31324919458454953,
      "learning_rate": 4.6905940594059406e-05,
      "loss": 0.4886,
      "step": 211
    },
    {
      "epoch": 0.47216035634743875,
      "grad_norm": 0.28748118504498216,
      "learning_rate": 4.686468646864687e-05,
      "loss": 0.4462,
      "step": 212
    },
    {
      "epoch": 0.47438752783964366,
      "grad_norm": 0.26192959125223536,
      "learning_rate": 4.682343234323432e-05,
      "loss": 0.466,
      "step": 213
    },
    {
      "epoch": 0.4766146993318486,
      "grad_norm": 0.2868158200559901,
      "learning_rate": 4.678217821782179e-05,
      "loss": 0.4393,
      "step": 214
    },
    {
      "epoch": 0.47884187082405344,
      "grad_norm": 0.3082515377114821,
      "learning_rate": 4.6740924092409245e-05,
      "loss": 0.4449,
      "step": 215
    },
    {
      "epoch": 0.48106904231625836,
      "grad_norm": 0.34124651164996833,
      "learning_rate": 4.6699669966996704e-05,
      "loss": 0.4772,
      "step": 216
    },
    {
      "epoch": 0.48329621380846327,
      "grad_norm": 0.2944775803261396,
      "learning_rate": 4.665841584158416e-05,
      "loss": 0.449,
      "step": 217
    },
    {
      "epoch": 0.48552338530066813,
      "grad_norm": 0.3238597167556311,
      "learning_rate": 4.661716171617162e-05,
      "loss": 0.4704,
      "step": 218
    },
    {
      "epoch": 0.48775055679287305,
      "grad_norm": 0.2931824943444532,
      "learning_rate": 4.657590759075908e-05,
      "loss": 0.457,
      "step": 219
    },
    {
      "epoch": 0.48997772828507796,
      "grad_norm": 0.3429250020790647,
      "learning_rate": 4.653465346534654e-05,
      "loss": 0.458,
      "step": 220
    },
    {
      "epoch": 0.4922048997772829,
      "grad_norm": 0.26116764626929456,
      "learning_rate": 4.6493399339933995e-05,
      "loss": 0.4531,
      "step": 221
    },
    {
      "epoch": 0.49443207126948774,
      "grad_norm": 0.33209861786607725,
      "learning_rate": 4.645214521452145e-05,
      "loss": 0.4684,
      "step": 222
    },
    {
      "epoch": 0.49665924276169265,
      "grad_norm": 0.29594074682650495,
      "learning_rate": 4.641089108910892e-05,
      "loss": 0.4394,
      "step": 223
    },
    {
      "epoch": 0.49888641425389757,
      "grad_norm": 0.31788744523283635,
      "learning_rate": 4.636963696369637e-05,
      "loss": 0.455,
      "step": 224
    },
    {
      "epoch": 0.5011135857461024,
      "grad_norm": 0.29403815622831014,
      "learning_rate": 4.632838283828383e-05,
      "loss": 0.4617,
      "step": 225
    },
    {
      "epoch": 0.5033407572383074,
      "grad_norm": 0.3246649957564367,
      "learning_rate": 4.628712871287129e-05,
      "loss": 0.4486,
      "step": 226
    },
    {
      "epoch": 0.5055679287305123,
      "grad_norm": 0.2947521869215355,
      "learning_rate": 4.624587458745875e-05,
      "loss": 0.4522,
      "step": 227
    },
    {
      "epoch": 0.5077951002227171,
      "grad_norm": 0.35319717032500775,
      "learning_rate": 4.62046204620462e-05,
      "loss": 0.4267,
      "step": 228
    },
    {
      "epoch": 0.5100222717149221,
      "grad_norm": 0.26486969329235016,
      "learning_rate": 4.616336633663367e-05,
      "loss": 0.4454,
      "step": 229
    },
    {
      "epoch": 0.512249443207127,
      "grad_norm": 0.28307917021023216,
      "learning_rate": 4.6122112211221126e-05,
      "loss": 0.4545,
      "step": 230
    },
    {
      "epoch": 0.5144766146993318,
      "grad_norm": 0.29150478292360005,
      "learning_rate": 4.6080858085808584e-05,
      "loss": 0.4606,
      "step": 231
    },
    {
      "epoch": 0.5167037861915368,
      "grad_norm": 0.29775322707947643,
      "learning_rate": 4.603960396039604e-05,
      "loss": 0.4486,
      "step": 232
    },
    {
      "epoch": 0.5189309576837416,
      "grad_norm": 0.277863972612543,
      "learning_rate": 4.59983498349835e-05,
      "loss": 0.4572,
      "step": 233
    },
    {
      "epoch": 0.5211581291759465,
      "grad_norm": 0.28925476269836853,
      "learning_rate": 4.595709570957096e-05,
      "loss": 0.4514,
      "step": 234
    },
    {
      "epoch": 0.5233853006681515,
      "grad_norm": 0.2892961181023234,
      "learning_rate": 4.591584158415842e-05,
      "loss": 0.4419,
      "step": 235
    },
    {
      "epoch": 0.5256124721603563,
      "grad_norm": 0.3156166066481885,
      "learning_rate": 4.5874587458745876e-05,
      "loss": 0.4886,
      "step": 236
    },
    {
      "epoch": 0.5278396436525612,
      "grad_norm": 0.3083820360287261,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 0.4399,
      "step": 237
    },
    {
      "epoch": 0.5300668151447662,
      "grad_norm": 0.2999984154669874,
      "learning_rate": 4.57920792079208e-05,
      "loss": 0.4343,
      "step": 238
    },
    {
      "epoch": 0.532293986636971,
      "grad_norm": 0.2948137814745657,
      "learning_rate": 4.575082508250825e-05,
      "loss": 0.4396,
      "step": 239
    },
    {
      "epoch": 0.534521158129176,
      "grad_norm": 0.2867836716684221,
      "learning_rate": 4.570957095709571e-05,
      "loss": 0.4444,
      "step": 240
    },
    {
      "epoch": 0.5367483296213809,
      "grad_norm": 0.2921803786051076,
      "learning_rate": 4.5668316831683174e-05,
      "loss": 0.4569,
      "step": 241
    },
    {
      "epoch": 0.5389755011135857,
      "grad_norm": 0.2667402553399932,
      "learning_rate": 4.562706270627063e-05,
      "loss": 0.4478,
      "step": 242
    },
    {
      "epoch": 0.5412026726057907,
      "grad_norm": 0.2714986926455618,
      "learning_rate": 4.558580858085808e-05,
      "loss": 0.4201,
      "step": 243
    },
    {
      "epoch": 0.5434298440979956,
      "grad_norm": 0.3010768527952275,
      "learning_rate": 4.554455445544555e-05,
      "loss": 0.4422,
      "step": 244
    },
    {
      "epoch": 0.5456570155902004,
      "grad_norm": 0.24424723334447315,
      "learning_rate": 4.550330033003301e-05,
      "loss": 0.4689,
      "step": 245
    },
    {
      "epoch": 0.5478841870824054,
      "grad_norm": 0.2824641561727399,
      "learning_rate": 4.5462046204620465e-05,
      "loss": 0.4577,
      "step": 246
    },
    {
      "epoch": 0.5501113585746102,
      "grad_norm": 0.262448982344736,
      "learning_rate": 4.542079207920792e-05,
      "loss": 0.4411,
      "step": 247
    },
    {
      "epoch": 0.5523385300668151,
      "grad_norm": 0.260954523917445,
      "learning_rate": 4.537953795379538e-05,
      "loss": 0.4461,
      "step": 248
    },
    {
      "epoch": 0.5545657015590201,
      "grad_norm": 0.25493119956939947,
      "learning_rate": 4.533828382838284e-05,
      "loss": 0.4457,
      "step": 249
    },
    {
      "epoch": 0.5567928730512249,
      "grad_norm": 0.24654376052164162,
      "learning_rate": 4.52970297029703e-05,
      "loss": 0.4644,
      "step": 250
    },
    {
      "epoch": 0.5590200445434298,
      "grad_norm": 0.2839190109435608,
      "learning_rate": 4.5255775577557756e-05,
      "loss": 0.4706,
      "step": 251
    },
    {
      "epoch": 0.5612472160356348,
      "grad_norm": 0.26041721433507015,
      "learning_rate": 4.5214521452145214e-05,
      "loss": 0.4462,
      "step": 252
    },
    {
      "epoch": 0.5634743875278396,
      "grad_norm": 0.2720837819840524,
      "learning_rate": 4.517326732673268e-05,
      "loss": 0.4256,
      "step": 253
    },
    {
      "epoch": 0.5657015590200446,
      "grad_norm": 0.31459213297941446,
      "learning_rate": 4.513201320132013e-05,
      "loss": 0.4423,
      "step": 254
    },
    {
      "epoch": 0.5679287305122495,
      "grad_norm": 0.26370544862803846,
      "learning_rate": 4.509075907590759e-05,
      "loss": 0.4478,
      "step": 255
    },
    {
      "epoch": 0.5701559020044543,
      "grad_norm": 0.2954224532182178,
      "learning_rate": 4.5049504950495054e-05,
      "loss": 0.4482,
      "step": 256
    },
    {
      "epoch": 0.5723830734966593,
      "grad_norm": 0.24297839932464907,
      "learning_rate": 4.500825082508251e-05,
      "loss": 0.4467,
      "step": 257
    },
    {
      "epoch": 0.5746102449888641,
      "grad_norm": 0.30732074611692045,
      "learning_rate": 4.4966996699669964e-05,
      "loss": 0.4569,
      "step": 258
    },
    {
      "epoch": 0.576837416481069,
      "grad_norm": 0.2561720549211904,
      "learning_rate": 4.492574257425743e-05,
      "loss": 0.4473,
      "step": 259
    },
    {
      "epoch": 0.579064587973274,
      "grad_norm": 0.2538371668472489,
      "learning_rate": 4.488448844884489e-05,
      "loss": 0.4526,
      "step": 260
    },
    {
      "epoch": 0.5812917594654788,
      "grad_norm": 0.3092696193325629,
      "learning_rate": 4.4843234323432346e-05,
      "loss": 0.4764,
      "step": 261
    },
    {
      "epoch": 0.5835189309576837,
      "grad_norm": 0.27859232038495874,
      "learning_rate": 4.4801980198019804e-05,
      "loss": 0.452,
      "step": 262
    },
    {
      "epoch": 0.5857461024498887,
      "grad_norm": 0.3200460894107544,
      "learning_rate": 4.476072607260726e-05,
      "loss": 0.4291,
      "step": 263
    },
    {
      "epoch": 0.5879732739420935,
      "grad_norm": 0.32931430711122667,
      "learning_rate": 4.471947194719473e-05,
      "loss": 0.4582,
      "step": 264
    },
    {
      "epoch": 0.5902004454342984,
      "grad_norm": 0.2731605321453466,
      "learning_rate": 4.467821782178218e-05,
      "loss": 0.4363,
      "step": 265
    },
    {
      "epoch": 0.5924276169265034,
      "grad_norm": 0.2703465656719884,
      "learning_rate": 4.463696369636964e-05,
      "loss": 0.4347,
      "step": 266
    },
    {
      "epoch": 0.5946547884187082,
      "grad_norm": 0.3626930341938144,
      "learning_rate": 4.45957095709571e-05,
      "loss": 0.4425,
      "step": 267
    },
    {
      "epoch": 0.5968819599109132,
      "grad_norm": 0.2646653041944948,
      "learning_rate": 4.455445544554456e-05,
      "loss": 0.4289,
      "step": 268
    },
    {
      "epoch": 0.5991091314031181,
      "grad_norm": 0.29847165588799424,
      "learning_rate": 4.451320132013201e-05,
      "loss": 0.4366,
      "step": 269
    },
    {
      "epoch": 0.6013363028953229,
      "grad_norm": 0.26851259081432843,
      "learning_rate": 4.447194719471948e-05,
      "loss": 0.4703,
      "step": 270
    },
    {
      "epoch": 0.6035634743875279,
      "grad_norm": 0.2987098198875876,
      "learning_rate": 4.4430693069306935e-05,
      "loss": 0.445,
      "step": 271
    },
    {
      "epoch": 0.6057906458797327,
      "grad_norm": 0.2605905093946033,
      "learning_rate": 4.438943894389439e-05,
      "loss": 0.4357,
      "step": 272
    },
    {
      "epoch": 0.6080178173719376,
      "grad_norm": 0.24844971421080053,
      "learning_rate": 4.434818481848185e-05,
      "loss": 0.419,
      "step": 273
    },
    {
      "epoch": 0.6102449888641426,
      "grad_norm": 0.3290220226694587,
      "learning_rate": 4.430693069306931e-05,
      "loss": 0.4465,
      "step": 274
    },
    {
      "epoch": 0.6124721603563474,
      "grad_norm": 0.28480796997833574,
      "learning_rate": 4.426567656765677e-05,
      "loss": 0.4441,
      "step": 275
    },
    {
      "epoch": 0.6146993318485523,
      "grad_norm": 0.2976483417452742,
      "learning_rate": 4.4224422442244226e-05,
      "loss": 0.438,
      "step": 276
    },
    {
      "epoch": 0.6169265033407573,
      "grad_norm": 0.32736870022235975,
      "learning_rate": 4.4183168316831684e-05,
      "loss": 0.4614,
      "step": 277
    },
    {
      "epoch": 0.6191536748329621,
      "grad_norm": 0.26160786488834764,
      "learning_rate": 4.414191419141914e-05,
      "loss": 0.4366,
      "step": 278
    },
    {
      "epoch": 0.621380846325167,
      "grad_norm": 0.33852355498895254,
      "learning_rate": 4.410066006600661e-05,
      "loss": 0.4484,
      "step": 279
    },
    {
      "epoch": 0.623608017817372,
      "grad_norm": 0.28362171544376097,
      "learning_rate": 4.405940594059406e-05,
      "loss": 0.4381,
      "step": 280
    },
    {
      "epoch": 0.6258351893095768,
      "grad_norm": 0.2811074334233704,
      "learning_rate": 4.401815181518152e-05,
      "loss": 0.4346,
      "step": 281
    },
    {
      "epoch": 0.6280623608017817,
      "grad_norm": 0.3184745078431333,
      "learning_rate": 4.397689768976898e-05,
      "loss": 0.4457,
      "step": 282
    },
    {
      "epoch": 0.6302895322939867,
      "grad_norm": 0.3008010945284472,
      "learning_rate": 4.393564356435644e-05,
      "loss": 0.4511,
      "step": 283
    },
    {
      "epoch": 0.6325167037861915,
      "grad_norm": 0.32595315041730505,
      "learning_rate": 4.389438943894389e-05,
      "loss": 0.4565,
      "step": 284
    },
    {
      "epoch": 0.6347438752783965,
      "grad_norm": 0.242270649578824,
      "learning_rate": 4.385313531353136e-05,
      "loss": 0.4247,
      "step": 285
    },
    {
      "epoch": 0.6369710467706013,
      "grad_norm": 0.3304882530212611,
      "learning_rate": 4.3811881188118816e-05,
      "loss": 0.4459,
      "step": 286
    },
    {
      "epoch": 0.6391982182628062,
      "grad_norm": 0.2749998460242237,
      "learning_rate": 4.3770627062706274e-05,
      "loss": 0.4725,
      "step": 287
    },
    {
      "epoch": 0.6414253897550112,
      "grad_norm": 0.3051711298096592,
      "learning_rate": 4.372937293729373e-05,
      "loss": 0.4822,
      "step": 288
    },
    {
      "epoch": 0.643652561247216,
      "grad_norm": 0.2742029836047166,
      "learning_rate": 4.368811881188119e-05,
      "loss": 0.4497,
      "step": 289
    },
    {
      "epoch": 0.6458797327394209,
      "grad_norm": 0.2617353582124388,
      "learning_rate": 4.364686468646865e-05,
      "loss": 0.4467,
      "step": 290
    },
    {
      "epoch": 0.6481069042316259,
      "grad_norm": 0.2737738804183133,
      "learning_rate": 4.360561056105611e-05,
      "loss": 0.4561,
      "step": 291
    },
    {
      "epoch": 0.6503340757238307,
      "grad_norm": 0.3091759816395053,
      "learning_rate": 4.3564356435643565e-05,
      "loss": 0.4736,
      "step": 292
    },
    {
      "epoch": 0.6525612472160356,
      "grad_norm": 0.25626886901062795,
      "learning_rate": 4.352310231023102e-05,
      "loss": 0.4311,
      "step": 293
    },
    {
      "epoch": 0.6547884187082406,
      "grad_norm": 0.26935906361510203,
      "learning_rate": 4.348184818481849e-05,
      "loss": 0.4391,
      "step": 294
    },
    {
      "epoch": 0.6570155902004454,
      "grad_norm": 0.29389907195879933,
      "learning_rate": 4.344059405940594e-05,
      "loss": 0.4355,
      "step": 295
    },
    {
      "epoch": 0.6592427616926503,
      "grad_norm": 0.27782045565159375,
      "learning_rate": 4.33993399339934e-05,
      "loss": 0.4432,
      "step": 296
    },
    {
      "epoch": 0.6614699331848553,
      "grad_norm": 0.2889974411301799,
      "learning_rate": 4.335808580858086e-05,
      "loss": 0.4474,
      "step": 297
    },
    {
      "epoch": 0.6636971046770601,
      "grad_norm": 0.271845863873609,
      "learning_rate": 4.331683168316832e-05,
      "loss": 0.439,
      "step": 298
    },
    {
      "epoch": 0.6659242761692651,
      "grad_norm": 0.29771387683903633,
      "learning_rate": 4.327557755775577e-05,
      "loss": 0.4506,
      "step": 299
    },
    {
      "epoch": 0.6681514476614699,
      "grad_norm": 0.27637974584922304,
      "learning_rate": 4.323432343234324e-05,
      "loss": 0.4532,
      "step": 300
    },
    {
      "epoch": 0.6703786191536748,
      "grad_norm": 0.26229222544704045,
      "learning_rate": 4.3193069306930696e-05,
      "loss": 0.4366,
      "step": 301
    },
    {
      "epoch": 0.6726057906458798,
      "grad_norm": 0.2735519792708174,
      "learning_rate": 4.3151815181518154e-05,
      "loss": 0.4473,
      "step": 302
    },
    {
      "epoch": 0.6748329621380846,
      "grad_norm": 0.2791681791157702,
      "learning_rate": 4.311056105610561e-05,
      "loss": 0.4618,
      "step": 303
    },
    {
      "epoch": 0.6770601336302895,
      "grad_norm": 0.2552442658275953,
      "learning_rate": 4.306930693069307e-05,
      "loss": 0.454,
      "step": 304
    },
    {
      "epoch": 0.6792873051224945,
      "grad_norm": 0.2911430189507063,
      "learning_rate": 4.302805280528053e-05,
      "loss": 0.4266,
      "step": 305
    },
    {
      "epoch": 0.6815144766146993,
      "grad_norm": 0.27188666629471603,
      "learning_rate": 4.298679867986799e-05,
      "loss": 0.4451,
      "step": 306
    },
    {
      "epoch": 0.6837416481069042,
      "grad_norm": 0.28050623589469015,
      "learning_rate": 4.2945544554455446e-05,
      "loss": 0.4365,
      "step": 307
    },
    {
      "epoch": 0.6859688195991092,
      "grad_norm": 0.2935564501685569,
      "learning_rate": 4.2904290429042904e-05,
      "loss": 0.4591,
      "step": 308
    },
    {
      "epoch": 0.688195991091314,
      "grad_norm": 0.24920342109951324,
      "learning_rate": 4.286303630363037e-05,
      "loss": 0.4188,
      "step": 309
    },
    {
      "epoch": 0.6904231625835189,
      "grad_norm": 0.25293406559071957,
      "learning_rate": 4.282178217821782e-05,
      "loss": 0.4243,
      "step": 310
    },
    {
      "epoch": 0.6926503340757239,
      "grad_norm": 0.2614798712738581,
      "learning_rate": 4.278052805280528e-05,
      "loss": 0.446,
      "step": 311
    },
    {
      "epoch": 0.6948775055679287,
      "grad_norm": 0.30606992019945795,
      "learning_rate": 4.2739273927392744e-05,
      "loss": 0.4422,
      "step": 312
    },
    {
      "epoch": 0.6971046770601337,
      "grad_norm": 0.24582221104090354,
      "learning_rate": 4.26980198019802e-05,
      "loss": 0.4727,
      "step": 313
    },
    {
      "epoch": 0.6993318485523385,
      "grad_norm": 0.2706816252510679,
      "learning_rate": 4.265676567656766e-05,
      "loss": 0.4467,
      "step": 314
    },
    {
      "epoch": 0.7015590200445434,
      "grad_norm": 0.25757445350913805,
      "learning_rate": 4.261551155115512e-05,
      "loss": 0.4312,
      "step": 315
    },
    {
      "epoch": 0.7037861915367484,
      "grad_norm": 0.24832914827025765,
      "learning_rate": 4.257425742574258e-05,
      "loss": 0.4227,
      "step": 316
    },
    {
      "epoch": 0.7060133630289532,
      "grad_norm": 0.26987925485376546,
      "learning_rate": 4.2533003300330035e-05,
      "loss": 0.4598,
      "step": 317
    },
    {
      "epoch": 0.7082405345211581,
      "grad_norm": 0.27766968317320195,
      "learning_rate": 4.249174917491749e-05,
      "loss": 0.4523,
      "step": 318
    },
    {
      "epoch": 0.7104677060133631,
      "grad_norm": 0.26095987732230747,
      "learning_rate": 4.245049504950495e-05,
      "loss": 0.4319,
      "step": 319
    },
    {
      "epoch": 0.7126948775055679,
      "grad_norm": 0.26160766384381995,
      "learning_rate": 4.240924092409242e-05,
      "loss": 0.4412,
      "step": 320
    },
    {
      "epoch": 0.7149220489977728,
      "grad_norm": 0.2584570874357079,
      "learning_rate": 4.236798679867987e-05,
      "loss": 0.4551,
      "step": 321
    },
    {
      "epoch": 0.7171492204899778,
      "grad_norm": 0.2834159008077785,
      "learning_rate": 4.2326732673267326e-05,
      "loss": 0.4431,
      "step": 322
    },
    {
      "epoch": 0.7193763919821826,
      "grad_norm": 0.24898946543574163,
      "learning_rate": 4.228547854785479e-05,
      "loss": 0.4302,
      "step": 323
    },
    {
      "epoch": 0.7216035634743875,
      "grad_norm": 0.28860867984847915,
      "learning_rate": 4.224422442244225e-05,
      "loss": 0.4475,
      "step": 324
    },
    {
      "epoch": 0.7238307349665924,
      "grad_norm": 0.23833559702503349,
      "learning_rate": 4.22029702970297e-05,
      "loss": 0.4716,
      "step": 325
    },
    {
      "epoch": 0.7260579064587973,
      "grad_norm": 0.2763178033720794,
      "learning_rate": 4.2161716171617166e-05,
      "loss": 0.424,
      "step": 326
    },
    {
      "epoch": 0.7282850779510023,
      "grad_norm": 0.2402663207112546,
      "learning_rate": 4.2120462046204624e-05,
      "loss": 0.4373,
      "step": 327
    },
    {
      "epoch": 0.7305122494432071,
      "grad_norm": 0.2714560047859496,
      "learning_rate": 4.207920792079208e-05,
      "loss": 0.434,
      "step": 328
    },
    {
      "epoch": 0.732739420935412,
      "grad_norm": 0.22960996560195104,
      "learning_rate": 4.203795379537954e-05,
      "loss": 0.4173,
      "step": 329
    },
    {
      "epoch": 0.734966592427617,
      "grad_norm": 0.30360671082013174,
      "learning_rate": 4.1996699669967e-05,
      "loss": 0.4484,
      "step": 330
    },
    {
      "epoch": 0.7371937639198218,
      "grad_norm": 0.24679972364853342,
      "learning_rate": 4.195544554455446e-05,
      "loss": 0.4522,
      "step": 331
    },
    {
      "epoch": 0.7394209354120267,
      "grad_norm": 0.3047224480559927,
      "learning_rate": 4.1914191419141916e-05,
      "loss": 0.4326,
      "step": 332
    },
    {
      "epoch": 0.7416481069042317,
      "grad_norm": 0.2546066095218112,
      "learning_rate": 4.1872937293729374e-05,
      "loss": 0.4498,
      "step": 333
    },
    {
      "epoch": 0.7438752783964365,
      "grad_norm": 0.25721504775664134,
      "learning_rate": 4.183168316831683e-05,
      "loss": 0.431,
      "step": 334
    },
    {
      "epoch": 0.7461024498886414,
      "grad_norm": 0.2691894404395012,
      "learning_rate": 4.17904290429043e-05,
      "loss": 0.4387,
      "step": 335
    },
    {
      "epoch": 0.7483296213808464,
      "grad_norm": 0.23271965303359804,
      "learning_rate": 4.174917491749175e-05,
      "loss": 0.4399,
      "step": 336
    },
    {
      "epoch": 0.7505567928730512,
      "grad_norm": 0.26493570894435625,
      "learning_rate": 4.170792079207921e-05,
      "loss": 0.4459,
      "step": 337
    },
    {
      "epoch": 0.7527839643652561,
      "grad_norm": 2.0248894289396824,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.474,
      "step": 338
    },
    {
      "epoch": 0.755011135857461,
      "grad_norm": 0.3062099581698654,
      "learning_rate": 4.162541254125413e-05,
      "loss": 0.4358,
      "step": 339
    },
    {
      "epoch": 0.7572383073496659,
      "grad_norm": 0.25946848706627174,
| "learning_rate": 4.158415841584158e-05, | |
| "loss": 0.4233, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.7594654788418709, | |
| "grad_norm": 0.25713280085130474, | |
| "learning_rate": 4.154290429042905e-05, | |
| "loss": 0.4439, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.7616926503340757, | |
| "grad_norm": 0.3167572006113162, | |
| "learning_rate": 4.1501650165016505e-05, | |
| "loss": 0.4436, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.7639198218262806, | |
| "grad_norm": 0.26048288751304155, | |
| "learning_rate": 4.146039603960396e-05, | |
| "loss": 0.417, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.7661469933184856, | |
| "grad_norm": 0.25497473878005894, | |
| "learning_rate": 4.141914191419142e-05, | |
| "loss": 0.4557, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.7683741648106904, | |
| "grad_norm": 0.2911566218466426, | |
| "learning_rate": 4.137788778877888e-05, | |
| "loss": 0.4404, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.7706013363028953, | |
| "grad_norm": 0.25971342414899073, | |
| "learning_rate": 4.133663366336634e-05, | |
| "loss": 0.4318, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.7728285077951003, | |
| "grad_norm": 0.3151547768170685, | |
| "learning_rate": 4.1295379537953796e-05, | |
| "loss": 0.4105, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.7750556792873051, | |
| "grad_norm": 0.2738918273042278, | |
| "learning_rate": 4.1254125412541255e-05, | |
| "loss": 0.4436, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.77728285077951, | |
| "grad_norm": 0.296938490357881, | |
| "learning_rate": 4.121287128712871e-05, | |
| "loss": 0.4448, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.779510022271715, | |
| "grad_norm": 0.27038716684654934, | |
| "learning_rate": 4.117161716171618e-05, | |
| "loss": 0.4454, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.7817371937639198, | |
| "grad_norm": 0.3227956568011175, | |
| "learning_rate": 4.113036303630363e-05, | |
| "loss": 0.4406, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.7839643652561247, | |
| "grad_norm": 0.26633298229761343, | |
| "learning_rate": 4.108910891089109e-05, | |
| "loss": 0.4376, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.7861915367483296, | |
| "grad_norm": 0.28347495442610243, | |
| "learning_rate": 4.104785478547855e-05, | |
| "loss": 0.4337, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.7884187082405345, | |
| "grad_norm": 0.30182784687239844, | |
| "learning_rate": 4.100660066006601e-05, | |
| "loss": 0.4606, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.7906458797327395, | |
| "grad_norm": 0.2708234094108942, | |
| "learning_rate": 4.096534653465346e-05, | |
| "loss": 0.4503, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.7928730512249443, | |
| "grad_norm": 0.2606877113787521, | |
| "learning_rate": 4.092409240924093e-05, | |
| "loss": 0.4627, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.7951002227171492, | |
| "grad_norm": 0.255498912261423, | |
| "learning_rate": 4.0882838283828386e-05, | |
| "loss": 0.4494, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.7973273942093542, | |
| "grad_norm": 0.2634192898727719, | |
| "learning_rate": 4.0841584158415844e-05, | |
| "loss": 0.4378, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.799554565701559, | |
| "grad_norm": 0.2455180865698325, | |
| "learning_rate": 4.08003300330033e-05, | |
| "loss": 0.4425, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.8017817371937639, | |
| "grad_norm": 0.29057066501650736, | |
| "learning_rate": 4.075907590759076e-05, | |
| "loss": 0.4504, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.8040089086859689, | |
| "grad_norm": 0.24841510932266841, | |
| "learning_rate": 4.071782178217822e-05, | |
| "loss": 0.4308, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.8062360801781737, | |
| "grad_norm": 0.30437614999300794, | |
| "learning_rate": 4.067656765676568e-05, | |
| "loss": 0.4225, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.8084632516703786, | |
| "grad_norm": 0.24654821833407398, | |
| "learning_rate": 4.0635313531353135e-05, | |
| "loss": 0.4468, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.8106904231625836, | |
| "grad_norm": 0.34206382856361406, | |
| "learning_rate": 4.05940594059406e-05, | |
| "loss": 0.4238, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.8129175946547884, | |
| "grad_norm": 0.23051720605393852, | |
| "learning_rate": 4.055280528052806e-05, | |
| "loss": 0.4302, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.8151447661469933, | |
| "grad_norm": 0.30090350911289054, | |
| "learning_rate": 4.051155115511551e-05, | |
| "loss": 0.4422, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.8173719376391982, | |
| "grad_norm": 0.24573950009262582, | |
| "learning_rate": 4.0470297029702975e-05, | |
| "loss": 0.4306, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.8195991091314031, | |
| "grad_norm": 0.2882877236903447, | |
| "learning_rate": 4.042904290429043e-05, | |
| "loss": 0.4652, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.821826280623608, | |
| "grad_norm": 0.24988067797973895, | |
| "learning_rate": 4.038778877887789e-05, | |
| "loss": 0.4362, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8240534521158129, | |
| "grad_norm": 0.29408476035049613, | |
| "learning_rate": 4.034653465346535e-05, | |
| "loss": 0.4435, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.8262806236080178, | |
| "grad_norm": 0.2644363301471362, | |
| "learning_rate": 4.030528052805281e-05, | |
| "loss": 0.4344, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.8285077951002228, | |
| "grad_norm": 0.29493993411578473, | |
| "learning_rate": 4.0264026402640266e-05, | |
| "loss": 0.434, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8307349665924276, | |
| "grad_norm": 0.3468561021222359, | |
| "learning_rate": 4.0222772277227725e-05, | |
| "loss": 0.4384, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8329621380846325, | |
| "grad_norm": 0.29607221940526673, | |
| "learning_rate": 4.018151815181518e-05, | |
| "loss": 0.4628, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8351893095768375, | |
| "grad_norm": 0.3637525910818386, | |
| "learning_rate": 4.014026402640264e-05, | |
| "loss": 0.4415, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8374164810690423, | |
| "grad_norm": 0.28714265367455805, | |
| "learning_rate": 4.0099009900990106e-05, | |
| "loss": 0.4376, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8396436525612472, | |
| "grad_norm": 0.32629775286679424, | |
| "learning_rate": 4.005775577557756e-05, | |
| "loss": 0.4235, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.8418708240534521, | |
| "grad_norm": 0.2802612112542595, | |
| "learning_rate": 4.0016501650165016e-05, | |
| "loss": 0.4317, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.844097995545657, | |
| "grad_norm": 0.2918531735740327, | |
| "learning_rate": 3.997524752475248e-05, | |
| "loss": 0.452, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.8463251670378619, | |
| "grad_norm": 0.3352571650408221, | |
| "learning_rate": 3.993399339933994e-05, | |
| "loss": 0.4386, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.8485523385300668, | |
| "grad_norm": 0.2573244398349893, | |
| "learning_rate": 3.989273927392739e-05, | |
| "loss": 0.4574, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.8507795100222717, | |
| "grad_norm": 0.3027471516866268, | |
| "learning_rate": 3.9851485148514856e-05, | |
| "loss": 0.4419, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.8530066815144766, | |
| "grad_norm": 0.2854937531018722, | |
| "learning_rate": 3.9810231023102314e-05, | |
| "loss": 0.4584, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.8552338530066815, | |
| "grad_norm": 0.24589201590337514, | |
| "learning_rate": 3.976897689768977e-05, | |
| "loss": 0.4193, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.8574610244988864, | |
| "grad_norm": 0.2778137032428908, | |
| "learning_rate": 3.972772277227723e-05, | |
| "loss": 0.4343, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.8596881959910914, | |
| "grad_norm": 0.3342562442904411, | |
| "learning_rate": 3.968646864686469e-05, | |
| "loss": 0.4326, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.8619153674832962, | |
| "grad_norm": 0.24896713048447036, | |
| "learning_rate": 3.964521452145215e-05, | |
| "loss": 0.432, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.8641425389755011, | |
| "grad_norm": 0.29405006865231625, | |
| "learning_rate": 3.9603960396039605e-05, | |
| "loss": 0.433, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.8663697104677061, | |
| "grad_norm": 0.23757029182896722, | |
| "learning_rate": 3.9562706270627064e-05, | |
| "loss": 0.4418, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.8685968819599109, | |
| "grad_norm": 0.2724752861847499, | |
| "learning_rate": 3.952145214521452e-05, | |
| "loss": 0.422, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.8708240534521158, | |
| "grad_norm": 0.264389356209867, | |
| "learning_rate": 3.948019801980199e-05, | |
| "loss": 0.4247, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8730512249443207, | |
| "grad_norm": 0.27027284504631205, | |
| "learning_rate": 3.943894389438944e-05, | |
| "loss": 0.4307, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8752783964365256, | |
| "grad_norm": 0.266772697377517, | |
| "learning_rate": 3.9397689768976897e-05, | |
| "loss": 0.4309, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.8775055679287305, | |
| "grad_norm": 0.2614972736062481, | |
| "learning_rate": 3.935643564356436e-05, | |
| "loss": 0.4187, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8797327394209354, | |
| "grad_norm": 0.23777796416631325, | |
| "learning_rate": 3.931518151815182e-05, | |
| "loss": 0.4303, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.8819599109131403, | |
| "grad_norm": 0.25876129593586256, | |
| "learning_rate": 3.927392739273927e-05, | |
| "loss": 0.4277, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.8841870824053452, | |
| "grad_norm": 0.24533319198985845, | |
| "learning_rate": 3.9232673267326736e-05, | |
| "loss": 0.4316, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.8864142538975501, | |
| "grad_norm": 0.2827282102508032, | |
| "learning_rate": 3.9191419141914195e-05, | |
| "loss": 0.4374, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.888641425389755, | |
| "grad_norm": 0.25672028235670863, | |
| "learning_rate": 3.915016501650165e-05, | |
| "loss": 0.4334, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.89086859688196, | |
| "grad_norm": 0.2964434942824687, | |
| "learning_rate": 3.910891089108911e-05, | |
| "loss": 0.4382, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.8930957683741648, | |
| "grad_norm": 0.2992038368053277, | |
| "learning_rate": 3.906765676567657e-05, | |
| "loss": 0.4506, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.8953229398663697, | |
| "grad_norm": 0.2824229554017287, | |
| "learning_rate": 3.902640264026403e-05, | |
| "loss": 0.4292, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.8975501113585747, | |
| "grad_norm": 0.3021114688794221, | |
| "learning_rate": 3.8985148514851486e-05, | |
| "loss": 0.4159, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.8997772828507795, | |
| "grad_norm": 0.2670836799841371, | |
| "learning_rate": 3.8943894389438944e-05, | |
| "loss": 0.4167, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.9020044543429844, | |
| "grad_norm": 0.2751414532574633, | |
| "learning_rate": 3.89026402640264e-05, | |
| "loss": 0.4216, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.9042316258351893, | |
| "grad_norm": 0.33093586819706966, | |
| "learning_rate": 3.886138613861387e-05, | |
| "loss": 0.4221, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.9064587973273942, | |
| "grad_norm": 0.28660771966605225, | |
| "learning_rate": 3.882013201320132e-05, | |
| "loss": 0.4328, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.9086859688195991, | |
| "grad_norm": 0.31729231099616456, | |
| "learning_rate": 3.877887788778878e-05, | |
| "loss": 0.42, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.910913140311804, | |
| "grad_norm": 0.28276830996460967, | |
| "learning_rate": 3.873762376237624e-05, | |
| "loss": 0.4315, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.9131403118040089, | |
| "grad_norm": 0.2637643372082265, | |
| "learning_rate": 3.86963696369637e-05, | |
| "loss": 0.4464, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.9153674832962138, | |
| "grad_norm": 0.29760756967577906, | |
| "learning_rate": 3.865511551155115e-05, | |
| "loss": 0.4309, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.9175946547884187, | |
| "grad_norm": 0.27094511138723293, | |
| "learning_rate": 3.861386138613862e-05, | |
| "loss": 0.4321, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.9198218262806236, | |
| "grad_norm": 0.2583422821283627, | |
| "learning_rate": 3.8572607260726075e-05, | |
| "loss": 0.4096, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.9220489977728286, | |
| "grad_norm": 0.29483389234975, | |
| "learning_rate": 3.8531353135313534e-05, | |
| "loss": 0.4352, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9242761692650334, | |
| "grad_norm": 0.2760297601343316, | |
| "learning_rate": 3.849009900990099e-05, | |
| "loss": 0.4433, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9265033407572383, | |
| "grad_norm": 0.2632096095730053, | |
| "learning_rate": 3.844884488448845e-05, | |
| "loss": 0.444, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9287305122494433, | |
| "grad_norm": 0.26571838725109725, | |
| "learning_rate": 3.8407590759075915e-05, | |
| "loss": 0.4479, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9309576837416481, | |
| "grad_norm": 0.2612223809833972, | |
| "learning_rate": 3.8366336633663367e-05, | |
| "loss": 0.43, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.933184855233853, | |
| "grad_norm": 0.20729020473362156, | |
| "learning_rate": 3.8325082508250825e-05, | |
| "loss": 0.4269, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.9354120267260579, | |
| "grad_norm": 0.30206416988432955, | |
| "learning_rate": 3.828382838283829e-05, | |
| "loss": 0.4401, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.9376391982182628, | |
| "grad_norm": 0.23463191817841658, | |
| "learning_rate": 3.824257425742575e-05, | |
| "loss": 0.4382, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9398663697104677, | |
| "grad_norm": 0.2830697542352734, | |
| "learning_rate": 3.82013201320132e-05, | |
| "loss": 0.4599, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.9420935412026726, | |
| "grad_norm": 0.24602603533970543, | |
| "learning_rate": 3.8160066006600665e-05, | |
| "loss": 0.4176, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.9443207126948775, | |
| "grad_norm": 0.3337694316665918, | |
| "learning_rate": 3.811881188118812e-05, | |
| "loss": 0.4435, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.9465478841870824, | |
| "grad_norm": 0.23677648895454642, | |
| "learning_rate": 3.807755775577558e-05, | |
| "loss": 0.4368, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.9487750556792873, | |
| "grad_norm": 0.29638771606416087, | |
| "learning_rate": 3.803630363036304e-05, | |
| "loss": 0.4317, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.9510022271714922, | |
| "grad_norm": 0.24211776195738213, | |
| "learning_rate": 3.79950495049505e-05, | |
| "loss": 0.4274, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.9532293986636972, | |
| "grad_norm": 0.2246529620028757, | |
| "learning_rate": 3.7953795379537956e-05, | |
| "loss": 0.4191, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.955456570155902, | |
| "grad_norm": 0.25492756281023654, | |
| "learning_rate": 3.7912541254125414e-05, | |
| "loss": 0.4279, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.9576837416481069, | |
| "grad_norm": 0.26189081420987503, | |
| "learning_rate": 3.787128712871287e-05, | |
| "loss": 0.4436, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.9599109131403119, | |
| "grad_norm": 0.243975014278495, | |
| "learning_rate": 3.783003300330033e-05, | |
| "loss": 0.4292, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.9621380846325167, | |
| "grad_norm": 0.242157774083566, | |
| "learning_rate": 3.7788778877887796e-05, | |
| "loss": 0.4066, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.9643652561247216, | |
| "grad_norm": 0.23787781526211782, | |
| "learning_rate": 3.774752475247525e-05, | |
| "loss": 0.4162, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.9665924276169265, | |
| "grad_norm": 0.265386727407684, | |
| "learning_rate": 3.7706270627062705e-05, | |
| "loss": 0.4317, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.9688195991091314, | |
| "grad_norm": 0.2757387320364763, | |
| "learning_rate": 3.766501650165017e-05, | |
| "loss": 0.429, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.9710467706013363, | |
| "grad_norm": 0.23589298838942482, | |
| "learning_rate": 3.762376237623763e-05, | |
| "loss": 0.427, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.9732739420935412, | |
| "grad_norm": 0.3019480813401635, | |
| "learning_rate": 3.758250825082508e-05, | |
| "loss": 0.4105, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.9755011135857461, | |
| "grad_norm": 0.2322268885125827, | |
| "learning_rate": 3.7541254125412545e-05, | |
| "loss": 0.4292, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.977728285077951, | |
| "grad_norm": 0.29553504935288555, | |
| "learning_rate": 3.7500000000000003e-05, | |
| "loss": 0.4117, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.9799554565701559, | |
| "grad_norm": 0.25987416783203476, | |
| "learning_rate": 3.745874587458746e-05, | |
| "loss": 0.4152, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.9821826280623608, | |
| "grad_norm": 0.23372024946025963, | |
| "learning_rate": 3.741749174917492e-05, | |
| "loss": 0.4064, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9844097995545658, | |
| "grad_norm": 0.3761401693903718, | |
| "learning_rate": 3.737623762376238e-05, | |
| "loss": 0.4434, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9866369710467706, | |
| "grad_norm": 0.240221654416868, | |
| "learning_rate": 3.7334983498349837e-05, | |
| "loss": 0.4329, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9888641425389755, | |
| "grad_norm": 0.22546093040269893, | |
| "learning_rate": 3.7293729372937295e-05, | |
| "loss": 0.4308, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.9910913140311804, | |
| "grad_norm": 0.27239165721549624, | |
| "learning_rate": 3.725247524752475e-05, | |
| "loss": 0.4505, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9933184855233853, | |
| "grad_norm": 0.2611268555670104, | |
| "learning_rate": 3.721122112211221e-05, | |
| "loss": 0.4296, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.9955456570155902, | |
| "grad_norm": 0.22475728637651804, | |
| "learning_rate": 3.7169966996699676e-05, | |
| "loss": 0.4284, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.9977728285077951, | |
| "grad_norm": 0.28347572303148216, | |
| "learning_rate": 3.712871287128713e-05, | |
| "loss": 0.4475, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.2508993557749204, | |
| "learning_rate": 3.7087458745874586e-05, | |
| "loss": 0.4384, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.0022271714922049, | |
| "grad_norm": 0.29315360782372374, | |
| "learning_rate": 3.704620462046205e-05, | |
| "loss": 0.3846, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.0044543429844097, | |
| "grad_norm": 0.27683401693327114, | |
| "learning_rate": 3.700495049504951e-05, | |
| "loss": 0.3782, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.0066815144766148, | |
| "grad_norm": 0.23634840578661528, | |
| "learning_rate": 3.696369636963696e-05, | |
| "loss": 0.3674, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.0089086859688197, | |
| "grad_norm": 0.3002065151882711, | |
| "learning_rate": 3.6922442244224426e-05, | |
| "loss": 0.3759, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.0111358574610245, | |
| "grad_norm": 0.25303557292326284, | |
| "learning_rate": 3.6881188118811884e-05, | |
| "loss": 0.3301, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.0133630289532294, | |
| "grad_norm": 0.3119179255994917, | |
| "learning_rate": 3.683993399339934e-05, | |
| "loss": 0.3601, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.0155902004454342, | |
| "grad_norm": 0.2915555001895583, | |
| "learning_rate": 3.67986798679868e-05, | |
| "loss": 0.3628, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.017817371937639, | |
| "grad_norm": 0.25225185820529017, | |
| "learning_rate": 3.675742574257426e-05, | |
| "loss": 0.3549, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.0200445434298442, | |
| "grad_norm": 0.29883651297742836, | |
| "learning_rate": 3.671617161716172e-05, | |
| "loss": 0.3542, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.022271714922049, | |
| "grad_norm": 0.2744252590186249, | |
| "learning_rate": 3.6674917491749175e-05, | |
| "loss": 0.3829, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.024498886414254, | |
| "grad_norm": 0.25716474537280526, | |
| "learning_rate": 3.6633663366336634e-05, | |
| "loss": 0.368, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0267260579064588, | |
| "grad_norm": 0.2609820678616415, | |
| "learning_rate": 3.659240924092409e-05, | |
| "loss": 0.3721, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.0289532293986636, | |
| "grad_norm": 0.27931915304683225, | |
| "learning_rate": 3.655115511551156e-05, | |
| "loss": 0.3585, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.0311804008908685, | |
| "grad_norm": 0.25961656509726105, | |
| "learning_rate": 3.650990099009901e-05, | |
| "loss": 0.3645, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.0334075723830736, | |
| "grad_norm": 0.2585612306648301, | |
| "learning_rate": 3.646864686468647e-05, | |
| "loss": 0.3547, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.0356347438752784, | |
| "grad_norm": 0.2989697101748151, | |
| "learning_rate": 3.642739273927393e-05, | |
| "loss": 0.3891, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.0378619153674833, | |
| "grad_norm": 0.2557096993002286, | |
| "learning_rate": 3.638613861386139e-05, | |
| "loss": 0.3586, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.0400890868596881, | |
| "grad_norm": 0.2711858210459584, | |
| "learning_rate": 3.634488448844885e-05, | |
| "loss": 0.3475, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.042316258351893, | |
| "grad_norm": 0.251733504110324, | |
| "learning_rate": 3.6303630363036307e-05, | |
| "loss": 0.3378, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.044543429844098, | |
| "grad_norm": 0.29452366944429953, | |
| "learning_rate": 3.6262376237623765e-05, | |
| "loss": 0.3551, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.046770601336303, | |
| "grad_norm": 0.26713159756280325, | |
| "learning_rate": 3.622112211221122e-05, | |
| "loss": 0.3603, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.0489977728285078, | |
| "grad_norm": 0.26982792989271304, | |
| "learning_rate": 3.617986798679868e-05, | |
| "loss": 0.3445, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.0512249443207127, | |
| "grad_norm": 0.29846303257877244, | |
| "learning_rate": 3.613861386138614e-05, | |
| "loss": 0.3242, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.0534521158129175, | |
| "grad_norm": 0.24183731075742984, | |
| "learning_rate": 3.6097359735973605e-05, | |
| "loss": 0.3721, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.0556792873051224, | |
| "grad_norm": 0.29520023901045445, | |
| "learning_rate": 3.6056105610561056e-05, | |
| "loss": 0.355, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.0579064587973275, | |
| "grad_norm": 0.3047522734685861, | |
| "learning_rate": 3.6014851485148514e-05, | |
| "loss": 0.351, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.0601336302895323, | |
| "grad_norm": 0.26831780421495066, | |
| "learning_rate": 3.597359735973598e-05, | |
| "loss": 0.3585, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.0623608017817372, | |
| "grad_norm": 0.27060903468927394, | |
| "learning_rate": 3.593234323432344e-05, | |
| "loss": 0.3552, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.064587973273942, | |
| "grad_norm": 0.2620962919692702, | |
| "learning_rate": 3.589108910891089e-05, | |
| "loss": 0.358, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.066815144766147, | |
| "grad_norm": 0.28827255446142563, | |
| "learning_rate": 3.5849834983498354e-05, | |
| "loss": 0.3548, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.069042316258352, | |
| "grad_norm": 0.2730218486644132, | |
| "learning_rate": 3.580858085808581e-05, | |
| "loss": 0.3525, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.0712694877505569, | |
| "grad_norm": 0.22229426724187393, | |
| "learning_rate": 3.576732673267327e-05, | |
| "loss": 0.3519, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.0734966592427617, | |
| "grad_norm": 0.25096650404869353, | |
| "learning_rate": 3.572607260726073e-05, | |
| "loss": 0.3677, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.0757238307349666, | |
| "grad_norm": 0.21704229516633425, | |
| "learning_rate": 3.568481848184819e-05, | |
| "loss": 0.3434, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.0779510022271714, | |
| "grad_norm": 0.2510064984692589, | |
| "learning_rate": 3.5643564356435645e-05, | |
| "loss": 0.3579, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.0801781737193763, | |
| "grad_norm": 0.2444418100304988, | |
| "learning_rate": 3.5602310231023104e-05, | |
| "loss": 0.3668, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.0824053452115814, | |
| "grad_norm": 0.22997335647464462, | |
| "learning_rate": 3.556105610561056e-05, | |
| "loss": 0.3517, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.0846325167037862, | |
| "grad_norm": 0.24920261498930532, | |
| "learning_rate": 3.551980198019802e-05, | |
| "loss": 0.3681, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.086859688195991, | |
| "grad_norm": 0.22014098096769694, | |
| "learning_rate": 3.5478547854785485e-05, | |
| "loss": 0.3482, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.089086859688196, | |
| "grad_norm": 0.2065658510344769, | |
| "learning_rate": 3.543729372937294e-05, | |
| "loss": 0.3287, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.0913140311804008, | |
| "grad_norm": 0.23758692598217404, | |
| "learning_rate": 3.5396039603960395e-05, | |
| "loss": 0.3635, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.093541202672606, | |
| "grad_norm": 0.2571919107965832, | |
| "learning_rate": 3.535478547854786e-05, | |
| "loss": 0.382, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.0957683741648108, | |
| "grad_norm": 0.24202112635122558, | |
| "learning_rate": 3.531353135313532e-05, | |
| "loss": 0.3551, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.0979955456570156, | |
| "grad_norm": 0.26045245117952176, | |
| "learning_rate": 3.527227722772277e-05, | |
| "loss": 0.339, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.1002227171492205, | |
| "grad_norm": 0.2473663708499315, | |
| "learning_rate": 3.5231023102310235e-05, | |
| "loss": 0.361, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.1024498886414253, | |
| "grad_norm": 0.2681151131548373, | |
| "learning_rate": 3.518976897689769e-05, | |
| "loss": 0.3609, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.1046770601336302, | |
| "grad_norm": 0.2630111804406823, | |
| "learning_rate": 3.514851485148515e-05, | |
| "loss": 0.3575, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.1069042316258353, | |
| "grad_norm": 0.2625195461023254, | |
| "learning_rate": 3.510726072607261e-05, | |
| "loss": 0.3679, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.1091314031180401, | |
| "grad_norm": 0.2666448934183908, | |
| "learning_rate": 3.506600660066007e-05, | |
| "loss": 0.3709, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.111358574610245, | |
| "grad_norm": 0.28347710182340724, | |
| "learning_rate": 3.5024752475247526e-05, | |
| "loss": 0.3615, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.1135857461024499, | |
| "grad_norm": 0.24355474473276895, | |
| "learning_rate": 3.4983498349834984e-05, | |
| "loss": 0.3363, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.1158129175946547, | |
| "grad_norm": 0.2729082414959115, | |
| "learning_rate": 3.494224422442244e-05, | |
| "loss": 0.3594, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.1180400890868596, | |
| "grad_norm": 0.29469449597071923, | |
| "learning_rate": 3.49009900990099e-05, | |
| "loss": 0.3639, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.1202672605790647, | |
| "grad_norm": 0.25125408230757457, | |
| "learning_rate": 3.4859735973597366e-05, | |
| "loss": 0.3545, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.1224944320712695, | |
| "grad_norm": 0.24147558861664065, | |
| "learning_rate": 3.481848184818482e-05, | |
| "loss": 0.3662, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.1247216035634744, | |
| "grad_norm": 0.29415126030943894, | |
| "learning_rate": 3.4777227722772276e-05, | |
| "loss": 0.3454, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.1269487750556793, | |
| "grad_norm": 0.24720611055347713, | |
| "learning_rate": 3.473597359735974e-05, | |
| "loss": 0.3497, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.1291759465478841, | |
| "grad_norm": 0.25957656510507626, | |
| "learning_rate": 3.46947194719472e-05, | |
| "loss": 0.347, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.131403118040089, | |
| "grad_norm": 0.2450716784138782, | |
| "learning_rate": 3.465346534653465e-05, | |
| "loss": 0.366, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.133630289532294, | |
| "grad_norm": 0.30031981997460955, | |
| "learning_rate": 3.4612211221122115e-05, | |
| "loss": 0.3576, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.135857461024499, | |
| "grad_norm": 0.24446990508884073, | |
| "learning_rate": 3.4570957095709574e-05, | |
| "loss": 0.3573, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.1380846325167038, | |
| "grad_norm": 0.22244869982263982, | |
| "learning_rate": 3.452970297029703e-05, | |
| "loss": 0.354, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.1403118040089086, | |
| "grad_norm": 0.26352213431786436, | |
| "learning_rate": 3.448844884488449e-05, | |
| "loss": 0.3693, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.1425389755011135, | |
| "grad_norm": 0.2142385019175425, | |
| "learning_rate": 3.444719471947195e-05, | |
| "loss": 0.339, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.1447661469933186, | |
| "grad_norm": 0.277588850926534, | |
| "learning_rate": 3.440594059405941e-05, | |
| "loss": 0.3579, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.1469933184855234, | |
| "grad_norm": 0.23632968130731732, | |
| "learning_rate": 3.4364686468646865e-05, | |
| "loss": 0.3569, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.1492204899777283, | |
| "grad_norm": 0.2788228005066622, | |
| "learning_rate": 3.432343234323432e-05, | |
| "loss": 0.3613, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.1514476614699332, | |
| "grad_norm": 0.24060550221371907, | |
| "learning_rate": 3.428217821782179e-05, | |
| "loss": 0.3477, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.153674832962138, | |
| "grad_norm": 0.258548604271602, | |
| "learning_rate": 3.4240924092409246e-05, | |
| "loss": 0.3357, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.1559020044543429, | |
| "grad_norm": 0.2193227357341931, | |
| "learning_rate": 3.41996699669967e-05, | |
| "loss": 0.3387, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.158129175946548, | |
| "grad_norm": 0.23695591929388757, | |
| "learning_rate": 3.415841584158416e-05, | |
| "loss": 0.35, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.1603563474387528, | |
| "grad_norm": 0.24085283197224489, | |
| "learning_rate": 3.411716171617162e-05, | |
| "loss": 0.3518, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.1625835189309577, | |
| "grad_norm": 0.26698388090393044, | |
| "learning_rate": 3.407590759075908e-05, | |
| "loss": 0.3728, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.1648106904231625, | |
| "grad_norm": 0.2514334608054634, | |
| "learning_rate": 3.403465346534654e-05, | |
| "loss": 0.368, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.1670378619153674, | |
| "grad_norm": 0.2706263407063563, | |
| "learning_rate": 3.3993399339933996e-05, | |
| "loss": 0.3625, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.1692650334075725, | |
| "grad_norm": 0.23649808379024595, | |
| "learning_rate": 3.3952145214521454e-05, | |
| "loss": 0.3473, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.1714922048997773, | |
| "grad_norm": 0.22320907181621547, | |
| "learning_rate": 3.391089108910891e-05, | |
| "loss": 0.334, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.1737193763919822, | |
| "grad_norm": 0.2395789753574448, | |
| "learning_rate": 3.386963696369637e-05, | |
| "loss": 0.3577, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.175946547884187, | |
| "grad_norm": 0.22982302309839056, | |
| "learning_rate": 3.382838283828383e-05, | |
| "loss": 0.336, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.178173719376392, | |
| "grad_norm": 0.26592300199211427, | |
| "learning_rate": 3.3787128712871294e-05, | |
| "loss": 0.3737, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.1804008908685968, | |
| "grad_norm": 0.2379664507554433, | |
| "learning_rate": 3.3745874587458746e-05, | |
| "loss": 0.3508, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.1826280623608019, | |
| "grad_norm": 0.27284638634221087, | |
| "learning_rate": 3.3704620462046204e-05, | |
| "loss": 0.3788, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.1848552338530067, | |
| "grad_norm": 0.22298690979985145, | |
| "learning_rate": 3.366336633663367e-05, | |
| "loss": 0.361, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.1870824053452116, | |
| "grad_norm": 0.25342901703627585, | |
| "learning_rate": 3.362211221122113e-05, | |
| "loss": 0.3461, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.1893095768374164, | |
| "grad_norm": 0.2462918835412843, | |
| "learning_rate": 3.358085808580858e-05, | |
| "loss": 0.3673, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.1915367483296213, | |
| "grad_norm": 0.23480418831778518, | |
| "learning_rate": 3.3539603960396044e-05, | |
| "loss": 0.3305, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.1937639198218264, | |
| "grad_norm": 0.22579058718977288, | |
| "learning_rate": 3.34983498349835e-05, | |
| "loss": 0.3484, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.1959910913140313, | |
| "grad_norm": 0.22351739536686402, | |
| "learning_rate": 3.345709570957096e-05, | |
| "loss": 0.3605, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.1982182628062361, | |
| "grad_norm": 0.23250187182794887, | |
| "learning_rate": 3.341584158415842e-05, | |
| "loss": 0.3525, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.200445434298441, | |
| "grad_norm": 0.22930925968735663, | |
| "learning_rate": 3.337458745874588e-05, | |
| "loss": 0.3667, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.2026726057906458, | |
| "grad_norm": 0.23045423822229336, | |
| "learning_rate": 3.3333333333333335e-05, | |
| "loss": 0.3495, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.2048997772828507, | |
| "grad_norm": 0.23457659707681205, | |
| "learning_rate": 3.329207920792079e-05, | |
| "loss": 0.358, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.2071269487750558, | |
| "grad_norm": 0.26247810496966095, | |
| "learning_rate": 3.325082508250825e-05, | |
| "loss": 0.3507, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.2093541202672606, | |
| "grad_norm": 0.21223867220084797, | |
| "learning_rate": 3.320957095709571e-05, | |
| "loss": 0.3527, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.2115812917594655, | |
| "grad_norm": 0.235166791081928, | |
| "learning_rate": 3.3168316831683175e-05, | |
| "loss": 0.357, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.2138084632516704, | |
| "grad_norm": 0.2541299314311305, | |
| "learning_rate": 3.3127062706270626e-05, | |
| "loss": 0.3559, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.2160356347438752, | |
| "grad_norm": 0.23336231564934568, | |
| "learning_rate": 3.3085808580858084e-05, | |
| "loss": 0.3723, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.2182628062360803, | |
| "grad_norm": 0.24580602513394764, | |
| "learning_rate": 3.304455445544555e-05, | |
| "loss": 0.3684, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.2204899777282852, | |
| "grad_norm": 0.20485755646576795, | |
| "learning_rate": 3.300330033003301e-05, | |
| "loss": 0.3531, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.22271714922049, | |
| "grad_norm": 0.24371288647694048, | |
| "learning_rate": 3.296204620462046e-05, | |
| "loss": 0.3585, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.2249443207126949, | |
| "grad_norm": 0.19774021220722549, | |
| "learning_rate": 3.2920792079207924e-05, | |
| "loss": 0.339, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2271714922048997, | |
| "grad_norm": 0.2600235842796674, | |
| "learning_rate": 3.287953795379538e-05, | |
| "loss": 0.36, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.2293986636971046, | |
| "grad_norm": 0.22024280724938608, | |
| "learning_rate": 3.283828382838284e-05, | |
| "loss": 0.3513, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.2316258351893095, | |
| "grad_norm": 0.23903906420041407, | |
| "learning_rate": 3.27970297029703e-05, | |
| "loss": 0.3552, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.2338530066815145, | |
| "grad_norm": 0.270801620970715, | |
| "learning_rate": 3.275577557755776e-05, | |
| "loss": 0.3688, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.2360801781737194, | |
| "grad_norm": 0.21905876480622277, | |
| "learning_rate": 3.2714521452145216e-05, | |
| "loss": 0.3445, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.2383073496659243, | |
| "grad_norm": 0.2403828223658685, | |
| "learning_rate": 3.2673267326732674e-05, | |
| "loss": 0.3511, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.2405345211581291, | |
| "grad_norm": 0.2456723575282447, | |
| "learning_rate": 3.263201320132013e-05, | |
| "loss": 0.3544, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.242761692650334, | |
| "grad_norm": 0.26448553284073184, | |
| "learning_rate": 3.259075907590759e-05, | |
| "loss": 0.3522, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.244988864142539, | |
| "grad_norm": 0.24073145486725794, | |
| "learning_rate": 3.2549504950495055e-05, | |
| "loss": 0.3508, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.247216035634744, | |
| "grad_norm": 0.27736400148415535, | |
| "learning_rate": 3.250825082508251e-05, | |
| "loss": 0.3648, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.2494432071269488, | |
| "grad_norm": 0.2728738915660453, | |
| "learning_rate": 3.2466996699669965e-05, | |
| "loss": 0.3704, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.2516703786191536, | |
| "grad_norm": 0.26300898311896, | |
| "learning_rate": 3.242574257425743e-05, | |
| "loss": 0.3668, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.2538975501113585, | |
| "grad_norm": 0.2548127745894571, | |
| "learning_rate": 3.238448844884489e-05, | |
| "loss": 0.3729, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.2561247216035634, | |
| "grad_norm": 0.1961809070235041, | |
| "learning_rate": 3.234323432343234e-05, | |
| "loss": 0.3527, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.2583518930957684, | |
| "grad_norm": 0.22948976382101388, | |
| "learning_rate": 3.2301980198019805e-05, | |
| "loss": 0.3367, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.2605790645879733, | |
| "grad_norm": 0.2716897968846777, | |
| "learning_rate": 3.226072607260726e-05, | |
| "loss": 0.3791, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.2628062360801782, | |
| "grad_norm": 0.23513234834544378, | |
| "learning_rate": 3.221947194719472e-05, | |
| "loss": 0.3693, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.265033407572383, | |
| "grad_norm": 0.24787013974811967, | |
| "learning_rate": 3.217821782178218e-05, | |
| "loss": 0.3384, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.267260579064588, | |
| "grad_norm": 0.25107053798110607, | |
| "learning_rate": 3.213696369636964e-05, | |
| "loss": 0.3677, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.269487750556793, | |
| "grad_norm": 0.20914988529919357, | |
| "learning_rate": 3.20957095709571e-05, | |
| "loss": 0.3499, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.2717149220489978, | |
| "grad_norm": 0.25215966314258914, | |
| "learning_rate": 3.2054455445544554e-05, | |
| "loss": 0.3491, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.2739420935412027, | |
| "grad_norm": 0.25325080210028006, | |
| "learning_rate": 3.201320132013201e-05, | |
| "loss": 0.3582, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.2761692650334076, | |
| "grad_norm": 0.21780047792534563, | |
| "learning_rate": 3.197194719471948e-05, | |
| "loss": 0.3474, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.2783964365256124, | |
| "grad_norm": 0.2214051811188461, | |
| "learning_rate": 3.1930693069306936e-05, | |
| "loss": 0.3552, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.2806236080178173, | |
| "grad_norm": 0.24543082087845347, | |
| "learning_rate": 3.188943894389439e-05, | |
| "loss": 0.3529, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.2828507795100224, | |
| "grad_norm": 0.20158614638718708, | |
| "learning_rate": 3.184818481848185e-05, | |
| "loss": 0.3434, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.2850779510022272, | |
| "grad_norm": 0.2296252359307742, | |
| "learning_rate": 3.180693069306931e-05, | |
| "loss": 0.3751, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.287305122494432, | |
| "grad_norm": 0.2185964200183311, | |
| "learning_rate": 3.176567656765677e-05, | |
| "loss": 0.367, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.289532293986637, | |
| "grad_norm": 0.2398417205034457, | |
| "learning_rate": 3.172442244224423e-05, | |
| "loss": 0.3573, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.2917594654788418, | |
| "grad_norm": 0.20606951067584606, | |
| "learning_rate": 3.1683168316831686e-05, | |
| "loss": 0.3605, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.2939866369710469, | |
| "grad_norm": 0.253402154414871, | |
| "learning_rate": 3.1641914191419144e-05, | |
| "loss": 0.385, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.2962138084632517, | |
| "grad_norm": 0.21569327617012055, | |
| "learning_rate": 3.16006600660066e-05, | |
| "loss": 0.3615, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.2984409799554566, | |
| "grad_norm": 0.20993771911712605, | |
| "learning_rate": 3.155940594059406e-05, | |
| "loss": 0.3467, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.3006681514476615, | |
| "grad_norm": 0.25707612252706463, | |
| "learning_rate": 3.151815181518152e-05, | |
| "loss": 0.3698, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.3028953229398663, | |
| "grad_norm": 0.23729147871242315, | |
| "learning_rate": 3.1476897689768984e-05, | |
| "loss": 0.3571, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.3051224944320712, | |
| "grad_norm": 0.2058740175214167, | |
| "learning_rate": 3.1435643564356435e-05, | |
| "loss": 0.3491, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.307349665924276, | |
| "grad_norm": 0.25787314593557403, | |
| "learning_rate": 3.139438943894389e-05, | |
| "loss": 0.3729, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.3095768374164811, | |
| "grad_norm": 0.23567481362489415, | |
| "learning_rate": 3.135313531353136e-05, | |
| "loss": 0.3674, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.311804008908686, | |
| "grad_norm": 0.2103686872656417, | |
| "learning_rate": 3.131188118811882e-05, | |
| "loss": 0.3546, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.3140311804008908, | |
| "grad_norm": 0.2246018723719588, | |
| "learning_rate": 3.127062706270627e-05, | |
| "loss": 0.3342, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.3162583518930957, | |
| "grad_norm": 0.2550281829523844, | |
| "learning_rate": 3.122937293729373e-05, | |
| "loss": 0.3604, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.3184855233853008, | |
| "grad_norm": 0.22197952276250127, | |
| "learning_rate": 3.118811881188119e-05, | |
| "loss": 0.3429, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.3207126948775056, | |
| "grad_norm": 0.25880191441627104, | |
| "learning_rate": 3.114686468646865e-05, | |
| "loss": 0.3461, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.3229398663697105, | |
| "grad_norm": 0.22702691621590193, | |
| "learning_rate": 3.110561056105611e-05, | |
| "loss": 0.3584, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.3251670378619154, | |
| "grad_norm": 0.23337868994919198, | |
| "learning_rate": 3.1064356435643566e-05, | |
| "loss": 0.3791, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.3273942093541202, | |
| "grad_norm": 0.24931669973599038, | |
| "learning_rate": 3.1023102310231024e-05, | |
| "loss": 0.358, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.329621380846325, | |
| "grad_norm": 0.21248967636665253, | |
| "learning_rate": 3.098184818481848e-05, | |
| "loss": 0.3333, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.33184855233853, | |
| "grad_norm": 0.22273911325503096, | |
| "learning_rate": 3.094059405940594e-05, | |
| "loss": 0.3763, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.334075723830735, | |
| "grad_norm": 0.2314939505997935, | |
| "learning_rate": 3.08993399339934e-05, | |
| "loss": 0.3507, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.3363028953229399, | |
| "grad_norm": 0.22611256534493768, | |
| "learning_rate": 3.0858085808580864e-05, | |
| "loss": 0.3657, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.3385300668151447, | |
| "grad_norm": 0.21856148705704515, | |
| "learning_rate": 3.0816831683168316e-05, | |
| "loss": 0.3602, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.3407572383073496, | |
| "grad_norm": 0.2418827295907198, | |
| "learning_rate": 3.0775577557755774e-05, | |
| "loss": 0.3382, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.3429844097995547, | |
| "grad_norm": 0.23647807359827352, | |
| "learning_rate": 3.073432343234324e-05, | |
| "loss": 0.3602, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.3452115812917596, | |
| "grad_norm": 0.22924025988751517, | |
| "learning_rate": 3.06930693069307e-05, | |
| "loss": 0.3572, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.3474387527839644, | |
| "grad_norm": 0.22831615741384578, | |
| "learning_rate": 3.065181518151815e-05, | |
| "loss": 0.36, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.3496659242761693, | |
| "grad_norm": 0.24207308088696777, | |
| "learning_rate": 3.0610561056105614e-05, | |
| "loss": 0.3591, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.3518930957683741, | |
| "grad_norm": 0.22533322573657474, | |
| "learning_rate": 3.056930693069307e-05, | |
| "loss": 0.3783, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.354120267260579, | |
| "grad_norm": 0.22354237123525514, | |
| "learning_rate": 3.052805280528053e-05, | |
| "loss": 0.3646, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.3563474387527839, | |
| "grad_norm": 0.21082532615883712, | |
| "learning_rate": 3.048679867986799e-05, | |
| "loss": 0.3685, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.358574610244989, | |
| "grad_norm": 0.21724487552837043, | |
| "learning_rate": 3.0445544554455447e-05, | |
| "loss": 0.3637, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.3608017817371938, | |
| "grad_norm": 0.2282169646639709, | |
| "learning_rate": 3.0404290429042902e-05, | |
| "loss": 0.361, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.3630289532293987, | |
| "grad_norm": 0.20533332057422468, | |
| "learning_rate": 3.0363036303630367e-05, | |
| "loss": 0.3335, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.3652561247216035, | |
| "grad_norm": 0.21314989490113376, | |
| "learning_rate": 3.032178217821782e-05, | |
| "loss": 0.3592, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.3674832962138086, | |
| "grad_norm": 0.2141447141940089, | |
| "learning_rate": 3.028052805280528e-05, | |
| "loss": 0.3359, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.3697104677060135, | |
| "grad_norm": 0.24581117751522166, | |
| "learning_rate": 3.023927392739274e-05, | |
| "loss": 0.3716, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.3719376391982183, | |
| "grad_norm": 0.2275094549967322, | |
| "learning_rate": 3.01980198019802e-05, | |
| "loss": 0.3553, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.3741648106904232, | |
| "grad_norm": 0.23356205976864639, | |
| "learning_rate": 3.015676567656766e-05, | |
| "loss": 0.3482, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.376391982182628, | |
| "grad_norm": 0.23882460698057495, | |
| "learning_rate": 3.0115511551155116e-05, | |
| "loss": 0.3579, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.378619153674833, | |
| "grad_norm": 0.23482925204649976, | |
| "learning_rate": 3.0074257425742575e-05, | |
| "loss": 0.3467, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.3808463251670378, | |
| "grad_norm": 0.22272122322754917, | |
| "learning_rate": 3.0033003300330036e-05, | |
| "loss": 0.3558, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.3830734966592428, | |
| "grad_norm": 0.2417717293854326, | |
| "learning_rate": 2.9991749174917494e-05, | |
| "loss": 0.3399, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.3853006681514477, | |
| "grad_norm": 0.21560732990542106, | |
| "learning_rate": 2.995049504950495e-05, | |
| "loss": 0.3263, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.3875278396436526, | |
| "grad_norm": 0.23511847998843455, | |
| "learning_rate": 2.9909240924092414e-05, | |
| "loss": 0.357, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.3897550111358574, | |
| "grad_norm": 0.2419549890007216, | |
| "learning_rate": 2.986798679867987e-05, | |
| "loss": 0.3376, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.3919821826280623, | |
| "grad_norm": 0.2418915221005408, | |
| "learning_rate": 2.9826732673267327e-05, | |
| "loss": 0.3559, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.3942093541202674, | |
| "grad_norm": 0.2219366354806474, | |
| "learning_rate": 2.978547854785479e-05, | |
| "loss": 0.3607, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.3964365256124722, | |
| "grad_norm": 0.22151027055415365, | |
| "learning_rate": 2.9744224422442247e-05, | |
| "loss": 0.3421, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.398663697104677, | |
| "grad_norm": 0.2818296636352545, | |
| "learning_rate": 2.9702970297029702e-05, | |
| "loss": 0.3598, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.400890868596882, | |
| "grad_norm": 0.22608544158053545, | |
| "learning_rate": 2.9661716171617164e-05, | |
| "loss": 0.3757, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.4031180400890868, | |
| "grad_norm": 0.2542515757235494, | |
| "learning_rate": 2.9620462046204622e-05, | |
| "loss": 0.3566, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.4053452115812917, | |
| "grad_norm": 0.22082951585632452, | |
| "learning_rate": 2.957920792079208e-05, | |
| "loss": 0.3532, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.4075723830734965, | |
| "grad_norm": 0.2152525241114565, | |
| "learning_rate": 2.9537953795379542e-05, | |
| "loss": 0.3476, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.4097995545657016, | |
| "grad_norm": 0.22504727209571587, | |
| "learning_rate": 2.9496699669966997e-05, | |
| "loss": 0.3506, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.4120267260579065, | |
| "grad_norm": 0.23833476284032978, | |
| "learning_rate": 2.9455445544554455e-05, | |
| "loss": 0.3698, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.4142538975501113, | |
| "grad_norm": 0.21086039461299577, | |
| "learning_rate": 2.9414191419141917e-05, | |
| "loss": 0.366, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.4164810690423162, | |
| "grad_norm": 0.209689633966253, | |
| "learning_rate": 2.9372937293729375e-05, | |
| "loss": 0.3464, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.4187082405345213, | |
| "grad_norm": 0.24474710567495397, | |
| "learning_rate": 2.933168316831683e-05, | |
| "loss": 0.3532, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.4209354120267261, | |
| "grad_norm": 0.22400192345008954, | |
| "learning_rate": 2.9290429042904295e-05, | |
| "loss": 0.3655, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.423162583518931, | |
| "grad_norm": 0.2058797826542391, | |
| "learning_rate": 2.924917491749175e-05, | |
| "loss": 0.3604, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.4253897550111359, | |
| "grad_norm": 0.2063600373971181, | |
| "learning_rate": 2.9207920792079208e-05, | |
| "loss": 0.3544, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.4276169265033407, | |
| "grad_norm": 0.19789395719309963, | |
| "learning_rate": 2.916666666666667e-05, | |
| "loss": 0.3443, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.4298440979955456, | |
| "grad_norm": 0.19534241146623657, | |
| "learning_rate": 2.9125412541254128e-05, | |
| "loss": 0.3558, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.4320712694877504, | |
| "grad_norm": 0.20204042866647118, | |
| "learning_rate": 2.9084158415841583e-05, | |
| "loss": 0.3696, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.4342984409799555, | |
| "grad_norm": 0.22097812615526374, | |
| "learning_rate": 2.9042904290429045e-05, | |
| "loss": 0.3601, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.4365256124721604, | |
| "grad_norm": 0.19680712562974112, | |
| "learning_rate": 2.9001650165016503e-05, | |
| "loss": 0.3398, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.4387527839643652, | |
| "grad_norm": 0.20759775269766909, | |
| "learning_rate": 2.896039603960396e-05, | |
| "loss": 0.3478, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.44097995545657, | |
| "grad_norm": 0.2345285222857468, | |
| "learning_rate": 2.8919141914191423e-05, | |
| "loss": 0.3636, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.4432071269487752, | |
| "grad_norm": 0.22986722410151925, | |
| "learning_rate": 2.8877887788778878e-05, | |
| "loss": 0.3724, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.44543429844098, | |
| "grad_norm": 0.23105094408176388, | |
| "learning_rate": 2.8836633663366336e-05, | |
| "loss": 0.3548, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.447661469933185, | |
| "grad_norm": 0.25829463050442975, | |
| "learning_rate": 2.8795379537953797e-05, | |
| "loss": 0.3719, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.4498886414253898, | |
| "grad_norm": 0.424453313230707, | |
| "learning_rate": 2.8754125412541256e-05, | |
| "loss": 0.3745, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.4521158129175946, | |
| "grad_norm": 0.2546120321330925, | |
| "learning_rate": 2.871287128712871e-05, | |
| "loss": 0.372, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.4543429844097995, | |
| "grad_norm": 0.23383800944478716, | |
| "learning_rate": 2.8671617161716176e-05, | |
| "loss": 0.3525, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.4565701559020043, | |
| "grad_norm": 7.318371253265792, | |
| "learning_rate": 2.863036303630363e-05, | |
| "loss": 0.3722, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.4587973273942094, | |
| "grad_norm": 0.28892038696657446, | |
| "learning_rate": 2.858910891089109e-05, | |
| "loss": 0.3373, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.4610244988864143, | |
| "grad_norm": 0.278423963067231, | |
| "learning_rate": 2.854785478547855e-05, | |
| "loss": 0.3519, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.4632516703786191, | |
| "grad_norm": 0.20740580993529, | |
| "learning_rate": 2.850660066006601e-05, | |
| "loss": 0.3452, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.465478841870824, | |
| "grad_norm": 0.2559023935627639, | |
| "learning_rate": 2.8465346534653464e-05, | |
| "loss": 0.3542, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.467706013363029, | |
| "grad_norm": 0.2494031935968005, | |
| "learning_rate": 2.8424092409240925e-05, | |
| "loss": 0.3554, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.469933184855234, | |
| "grad_norm": 0.21827118145506857, | |
| "learning_rate": 2.8382838283828383e-05, | |
| "loss": 0.3652, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.4721603563474388, | |
| "grad_norm": 0.24533730978200863, | |
| "learning_rate": 2.834158415841584e-05, | |
| "loss": 0.3598, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.4743875278396437, | |
| "grad_norm": 0.25328154863191904, | |
| "learning_rate": 2.8300330033003303e-05, | |
| "loss": 0.3544, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.4766146993318485, | |
| "grad_norm": 0.2668011805645278, | |
| "learning_rate": 2.8259075907590758e-05, | |
| "loss": 0.3596, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.4788418708240534, | |
| "grad_norm": 0.22271391411019326, | |
| "learning_rate": 2.8217821782178216e-05, | |
| "loss": 0.3441, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.4810690423162582, | |
| "grad_norm": 0.25938717488194835, | |
| "learning_rate": 2.8176567656765678e-05, | |
| "loss": 0.3658, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.4832962138084633, | |
| "grad_norm": 0.20781850068281127, | |
| "learning_rate": 2.8135313531353136e-05, | |
| "loss": 0.3721, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.4855233853006682, | |
| "grad_norm": 0.23085833582398785, | |
| "learning_rate": 2.8094059405940598e-05, | |
| "loss": 0.354, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.487750556792873, | |
| "grad_norm": 0.2656626528844188, | |
| "learning_rate": 2.8052805280528056e-05, | |
| "loss": 0.3638, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.489977728285078, | |
| "grad_norm": 0.24044869806682156, | |
| "learning_rate": 2.801155115511551e-05, | |
| "loss": 0.3659, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.492204899777283, | |
| "grad_norm": 0.21472096623862313, | |
| "learning_rate": 2.7970297029702973e-05, | |
| "loss": 0.3676, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.4944320712694878, | |
| "grad_norm": 0.24686435809063587, | |
| "learning_rate": 2.792904290429043e-05, | |
| "loss": 0.3585, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.4966592427616927, | |
| "grad_norm": 0.24840471134095787, | |
| "learning_rate": 2.788778877887789e-05, | |
| "loss": 0.3664, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.4988864142538976, | |
| "grad_norm": 0.2215556073459033, | |
| "learning_rate": 2.784653465346535e-05, | |
| "loss": 0.3551, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.5011135857461024, | |
| "grad_norm": 0.2671878307827319, | |
| "learning_rate": 2.7805280528052806e-05, | |
| "loss": 0.3395, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.5033407572383073, | |
| "grad_norm": 0.22945942117384913, | |
| "learning_rate": 2.7764026402640264e-05, | |
| "loss": 0.3753, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.5055679287305122, | |
| "grad_norm": 0.3375181873255935, | |
| "learning_rate": 2.7722772277227726e-05, | |
| "loss": 0.365, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.507795100222717, | |
| "grad_norm": 0.22619237117388163, | |
| "learning_rate": 2.7681518151815184e-05, | |
| "loss": 0.3488, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.510022271714922, | |
| "grad_norm": 0.34134705084759415, | |
| "learning_rate": 2.764026402640264e-05, | |
| "loss": 0.3562, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.512249443207127, | |
| "grad_norm": 0.2377850642522043, | |
| "learning_rate": 2.7599009900990104e-05, | |
| "loss": 0.3564, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.5144766146993318, | |
| "grad_norm": 0.22461681967594943, | |
| "learning_rate": 2.755775577557756e-05, | |
| "loss": 0.375, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.516703786191537, | |
| "grad_norm": 0.27346902302943626, | |
| "learning_rate": 2.7516501650165017e-05, | |
| "loss": 0.3499, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.5189309576837418, | |
| "grad_norm": 0.2622303223021103, | |
| "learning_rate": 2.747524752475248e-05, | |
| "loss": 0.3742, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.5211581291759466, | |
| "grad_norm": 0.23462898998214818, | |
| "learning_rate": 2.7433993399339937e-05, | |
| "loss": 0.3425, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.5233853006681515, | |
| "grad_norm": 0.2416333741824504, | |
| "learning_rate": 2.7392739273927392e-05, | |
| "loss": 0.3611, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.5256124721603563, | |
| "grad_norm": 0.24498854592268735, | |
| "learning_rate": 2.7351485148514853e-05, | |
| "loss": 0.3724, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.5278396436525612, | |
| "grad_norm": 0.21533813780649638, | |
| "learning_rate": 2.731023102310231e-05, | |
| "loss": 0.357, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.530066815144766, | |
| "grad_norm": 0.2198186097543753, | |
| "learning_rate": 2.726897689768977e-05, | |
| "loss": 0.3496, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.532293986636971, | |
| "grad_norm": 0.21972547732337955, | |
| "learning_rate": 2.722772277227723e-05, | |
| "loss": 0.3696, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.534521158129176, | |
| "grad_norm": 0.22960568812910168, | |
| "learning_rate": 2.7186468646864686e-05, | |
| "loss": 0.3638, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.5367483296213809, | |
| "grad_norm": 0.21587924800281283, | |
| "learning_rate": 2.7145214521452145e-05, | |
| "loss": 0.3461, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.5389755011135857, | |
| "grad_norm": 0.21450364367405003, | |
| "learning_rate": 2.7103960396039606e-05, | |
| "loss": 0.3505, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.5412026726057908, | |
| "grad_norm": 0.2272515582570843, | |
| "learning_rate": 2.7062706270627065e-05, | |
| "loss": 0.3566, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.5434298440979957, | |
| "grad_norm": 0.21731442522252906, | |
| "learning_rate": 2.702145214521452e-05, | |
| "loss": 0.3718, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.5456570155902005, | |
| "grad_norm": 0.23203118652825902, | |
| "learning_rate": 2.6980198019801985e-05, | |
| "loss": 0.3835, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.5478841870824054, | |
| "grad_norm": 0.24072617248442002, | |
| "learning_rate": 2.693894389438944e-05, | |
| "loss": 0.3567, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.5501113585746102, | |
| "grad_norm": 0.21211113741060783, | |
| "learning_rate": 2.6897689768976898e-05, | |
| "loss": 0.3621, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.552338530066815, | |
| "grad_norm": 0.22442534721044624, | |
| "learning_rate": 2.685643564356436e-05, | |
| "loss": 0.3552, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.55456570155902, | |
| "grad_norm": 0.22886218544287978, | |
| "learning_rate": 2.6815181518151818e-05, | |
| "loss": 0.3441, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.5567928730512248, | |
| "grad_norm": 0.21190605211030045, | |
| "learning_rate": 2.6773927392739272e-05, | |
| "loss": 0.3415, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.5590200445434297, | |
| "grad_norm": 0.21397403019649194, | |
| "learning_rate": 2.6732673267326734e-05, | |
| "loss": 0.3577, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.5612472160356348, | |
| "grad_norm": 0.20798407959268334, | |
| "learning_rate": 2.6691419141914192e-05, | |
| "loss": 0.3246, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.5634743875278396, | |
| "grad_norm": 0.22674241290780542, | |
| "learning_rate": 2.665016501650165e-05, | |
| "loss": 0.3308, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.5657015590200447, | |
| "grad_norm": 0.21971871603168083, | |
| "learning_rate": 2.6608910891089112e-05, | |
| "loss": 0.3386, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.5679287305122496, | |
| "grad_norm": 0.2371853987642058, | |
| "learning_rate": 2.6567656765676567e-05, | |
| "loss": 0.3496, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.5701559020044544, | |
| "grad_norm": 0.21379269248974136, | |
| "learning_rate": 2.6526402640264025e-05, | |
| "loss": 0.3481, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.5723830734966593, | |
| "grad_norm": 0.19585786330703275, | |
| "learning_rate": 2.6485148514851487e-05, | |
| "loss": 0.351, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.5746102449888641, | |
| "grad_norm": 0.2587421310900264, | |
| "learning_rate": 2.6443894389438945e-05, | |
| "loss": 0.3676, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.576837416481069, | |
| "grad_norm": 0.2585479760875973, | |
| "learning_rate": 2.64026402640264e-05, | |
| "loss": 0.3861, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.5790645879732739, | |
| "grad_norm": 0.2516216375907583, | |
| "learning_rate": 2.6361386138613865e-05, | |
| "loss": 0.3582, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.5812917594654787, | |
| "grad_norm": 0.20405466633047462, | |
| "learning_rate": 2.632013201320132e-05, | |
| "loss": 0.3483, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.5835189309576836, | |
| "grad_norm": 0.2475547863332457, | |
| "learning_rate": 2.6278877887788778e-05, | |
| "loss": 0.3615, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.5857461024498887, | |
| "grad_norm": 0.2081150380819684, | |
| "learning_rate": 2.623762376237624e-05, | |
| "loss": 0.3479, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.5879732739420935, | |
| "grad_norm": 0.24448725611838398, | |
| "learning_rate": 2.6196369636963698e-05, | |
| "loss": 0.3646, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.5902004454342984, | |
| "grad_norm": 0.26812892610271716, | |
| "learning_rate": 2.6155115511551153e-05, | |
| "loss": 0.3666, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.5924276169265035, | |
| "grad_norm": 0.21579759719455277, | |
| "learning_rate": 2.6113861386138615e-05, | |
| "loss": 0.3615, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.5946547884187083, | |
| "grad_norm": 0.25509286016118415, | |
| "learning_rate": 2.6072607260726073e-05, | |
| "loss": 0.3669, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.5968819599109132, | |
| "grad_norm": 0.24970731310233607, | |
| "learning_rate": 2.6031353135313535e-05, | |
| "loss": 0.3384, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.599109131403118, | |
| "grad_norm": 0.23967809915859692, | |
| "learning_rate": 2.5990099009900993e-05, | |
| "loss": 0.3748, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.601336302895323, | |
| "grad_norm": 0.2390244120163411, | |
| "learning_rate": 2.5948844884488448e-05, | |
| "loss": 0.3534, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.6035634743875278, | |
| "grad_norm": 0.2227378080348747, | |
| "learning_rate": 2.5907590759075913e-05, | |
| "loss": 0.3503, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.6057906458797326, | |
| "grad_norm": 0.22344303559182338, | |
| "learning_rate": 2.5866336633663368e-05, | |
| "loss": 0.3494, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.6080178173719375, | |
| "grad_norm": 0.2096719404959539, | |
| "learning_rate": 2.5825082508250826e-05, | |
| "loss": 0.3516, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.6102449888641426, | |
| "grad_norm": 0.23541364148816837, | |
| "learning_rate": 2.5783828382838288e-05, | |
| "loss": 0.3583, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.6124721603563474, | |
| "grad_norm": 0.24240489602243556, | |
| "learning_rate": 2.5742574257425746e-05, | |
| "loss": 0.3628, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.6146993318485523, | |
| "grad_norm": 0.23277176159424937, | |
| "learning_rate": 2.57013201320132e-05, | |
| "loss": 0.3619, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.6169265033407574, | |
| "grad_norm": 0.2261700410987079, | |
| "learning_rate": 2.5660066006600662e-05, | |
| "loss": 0.3539, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.6191536748329622, | |
| "grad_norm": 0.2475352723888899, | |
| "learning_rate": 2.561881188118812e-05, | |
| "loss": 0.3638, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.621380846325167, | |
| "grad_norm": 0.2637043817157399, | |
| "learning_rate": 2.557755775577558e-05, | |
| "loss": 0.3687, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.623608017817372, | |
| "grad_norm": 0.22209926097728006, | |
| "learning_rate": 2.553630363036304e-05, | |
| "loss": 0.3729, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.6258351893095768, | |
| "grad_norm": 0.22107752502682854, | |
| "learning_rate": 2.5495049504950495e-05, | |
| "loss": 0.3585, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.6280623608017817, | |
| "grad_norm": 0.23862184087559932, | |
| "learning_rate": 2.5453795379537954e-05, | |
| "loss": 0.3737, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.6302895322939865, | |
| "grad_norm": 0.22649801341135953, | |
| "learning_rate": 2.5412541254125415e-05, | |
| "loss": 0.3731, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.6325167037861914, | |
| "grad_norm": 0.22755105603708417, | |
| "learning_rate": 2.5371287128712873e-05, | |
| "loss": 0.3516, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.6347438752783965, | |
| "grad_norm": 0.19858185617627222, | |
| "learning_rate": 2.533003300330033e-05, | |
| "loss": 0.3607, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.6369710467706013, | |
| "grad_norm": 0.2297540445090836, | |
| "learning_rate": 2.5288778877887793e-05, | |
| "loss": 0.3424, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.6391982182628062, | |
| "grad_norm": 0.24870602060429842, | |
| "learning_rate": 2.5247524752475248e-05, | |
| "loss": 0.3715, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.6414253897550113, | |
| "grad_norm": 0.2024465654655418, | |
| "learning_rate": 2.5206270627062707e-05, | |
| "loss": 0.3591, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.6436525612472161, | |
| "grad_norm": 0.24668224515250928, | |
| "learning_rate": 2.5165016501650168e-05, | |
| "loss": 0.355, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.645879732739421, | |
| "grad_norm": 0.19057252993207835, | |
| "learning_rate": 2.5123762376237626e-05, | |
| "loss": 0.3433, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.6481069042316259, | |
| "grad_norm": 0.21552382920458873, | |
| "learning_rate": 2.508250825082508e-05, | |
| "loss": 0.3596, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.6503340757238307, | |
| "grad_norm": 0.22755184572320342, | |
| "learning_rate": 2.5041254125412543e-05, | |
| "loss": 0.3828, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.6525612472160356, | |
| "grad_norm": 0.20334457475049023, | |
| "learning_rate": 2.5e-05, | |
| "loss": 0.3695, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.6547884187082404, | |
| "grad_norm": 0.22480977110261696, | |
| "learning_rate": 2.495874587458746e-05, | |
| "loss": 0.384, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.6570155902004453, | |
| "grad_norm": 0.20554152248562454, | |
| "learning_rate": 2.4917491749174918e-05, | |
| "loss": 0.3502, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.6592427616926502, | |
| "grad_norm": 0.18831042914637963, | |
| "learning_rate": 2.4876237623762376e-05, | |
| "loss": 0.3398, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.6614699331848553, | |
| "grad_norm": 0.2197300674636681, | |
| "learning_rate": 2.4834983498349838e-05, | |
| "loss": 0.3325, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.6636971046770601, | |
| "grad_norm": 0.24074656874943445, | |
| "learning_rate": 2.4793729372937292e-05, | |
| "loss": 0.3802, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.6659242761692652, | |
| "grad_norm": 0.2137682640854006, | |
| "learning_rate": 2.4752475247524754e-05, | |
| "loss": 0.3511, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.66815144766147, | |
| "grad_norm": 0.1957331739172998, | |
| "learning_rate": 2.4711221122112212e-05, | |
| "loss": 0.3473, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.670378619153675, | |
| "grad_norm": 0.2164395963159786, | |
| "learning_rate": 2.466996699669967e-05, | |
| "loss": 0.3789, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.6726057906458798, | |
| "grad_norm": 0.22848414539053694, | |
| "learning_rate": 2.462871287128713e-05, | |
| "loss": 0.3611, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.6748329621380846, | |
| "grad_norm": 0.19882041874875156, | |
| "learning_rate": 2.458745874587459e-05, | |
| "loss": 0.3403, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.6770601336302895, | |
| "grad_norm": 0.20825632682323666, | |
| "learning_rate": 2.4546204620462045e-05, | |
| "loss": 0.3414, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.6792873051224944, | |
| "grad_norm": 0.23873860748628373, | |
| "learning_rate": 2.4504950495049507e-05, | |
| "loss": 0.3581, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.6815144766146992, | |
| "grad_norm": 0.22543112165583132, | |
| "learning_rate": 2.4463696369636965e-05, | |
| "loss": 0.3532, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.683741648106904, | |
| "grad_norm": 0.21800601765293173, | |
| "learning_rate": 2.4422442244224424e-05, | |
| "loss": 0.3676, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.6859688195991092, | |
| "grad_norm": 0.2140720368883988, | |
| "learning_rate": 2.4381188118811882e-05, | |
| "loss": 0.344, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.688195991091314, | |
| "grad_norm": 0.21486782545441024, | |
| "learning_rate": 2.433993399339934e-05, | |
| "loss": 0.3472, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.6904231625835189, | |
| "grad_norm": 0.21027644587528624, | |
| "learning_rate": 2.4298679867986802e-05, | |
| "loss": 0.3577, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.692650334075724, | |
| "grad_norm": 0.21417715876608553, | |
| "learning_rate": 2.4257425742574257e-05, | |
| "loss": 0.3573, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.6948775055679288, | |
| "grad_norm": 0.21757364057279047, | |
| "learning_rate": 2.4216171617161718e-05, | |
| "loss": 0.3652, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.6971046770601337, | |
| "grad_norm": 0.22363498217306269, | |
| "learning_rate": 2.4174917491749177e-05, | |
| "loss": 0.3551, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.6993318485523385, | |
| "grad_norm": 0.2119492697596465, | |
| "learning_rate": 2.4133663366336635e-05, | |
| "loss": 0.3607, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.7015590200445434, | |
| "grad_norm": 0.2096134105158527, | |
| "learning_rate": 2.4092409240924093e-05, | |
| "loss": 0.3391, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.7037861915367483, | |
| "grad_norm": 0.20537784757587624, | |
| "learning_rate": 2.4051155115511555e-05, | |
| "loss": 0.3677, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.7060133630289531, | |
| "grad_norm": 0.23113674389564584, | |
| "learning_rate": 2.400990099009901e-05, | |
| "loss": 0.3557, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.708240534521158, | |
| "grad_norm": 0.21121564485322036, | |
| "learning_rate": 2.396864686468647e-05, | |
| "loss": 0.3697, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.710467706013363, | |
| "grad_norm": 0.2258527543992175, | |
| "learning_rate": 2.392739273927393e-05, | |
| "loss": 0.3503, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.712694877505568, | |
| "grad_norm": 0.22888646361558582, | |
| "learning_rate": 2.3886138613861388e-05, | |
| "loss": 0.3589, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.7149220489977728, | |
| "grad_norm": 0.20663664144884927, | |
| "learning_rate": 2.3844884488448846e-05, | |
| "loss": 0.3519, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.7171492204899779, | |
| "grad_norm": 0.19925681560320907, | |
| "learning_rate": 2.3803630363036304e-05, | |
| "loss": 0.3455, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.7193763919821827, | |
| "grad_norm": 0.20251366706043314, | |
| "learning_rate": 2.3762376237623762e-05, | |
| "loss": 0.3592, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.7216035634743876, | |
| "grad_norm": 0.22138065506078447, | |
| "learning_rate": 2.372112211221122e-05, | |
| "loss": 0.3696, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.7238307349665924, | |
| "grad_norm": 0.2165757188295888, | |
| "learning_rate": 2.3679867986798682e-05, | |
| "loss": 0.3454, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.7260579064587973, | |
| "grad_norm": 0.204436241685765, | |
| "learning_rate": 2.3638613861386137e-05, | |
| "loss": 0.3504, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.7282850779510022, | |
| "grad_norm": 0.2517471400589238, | |
| "learning_rate": 2.35973597359736e-05, | |
| "loss": 0.3681, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.730512249443207, | |
| "grad_norm": 0.21082242020594905, | |
| "learning_rate": 2.3556105610561057e-05, | |
| "loss": 0.3522, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.732739420935412, | |
| "grad_norm": 0.31274971698593285, | |
| "learning_rate": 2.3514851485148515e-05, | |
| "loss": 0.3392, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.734966592427617, | |
| "grad_norm": 0.21797373750156024, | |
| "learning_rate": 2.3473597359735974e-05, | |
| "loss": 0.3464, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.7371937639198218, | |
| "grad_norm": 0.22426124926037705, | |
| "learning_rate": 2.3432343234323435e-05, | |
| "loss": 0.3381, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.7394209354120267, | |
| "grad_norm": 0.20958226019028067, | |
| "learning_rate": 2.3391089108910894e-05, | |
| "loss": 0.3613, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.7416481069042318, | |
| "grad_norm": 0.21997431509131082, | |
| "learning_rate": 2.3349834983498352e-05, | |
| "loss": 0.3414, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.7438752783964366, | |
| "grad_norm": 0.2585047283057077, | |
| "learning_rate": 2.330858085808581e-05, | |
| "loss": 0.3525, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.7461024498886415, | |
| "grad_norm": 0.1973554212874199, | |
| "learning_rate": 2.326732673267327e-05, | |
| "loss": 0.3453, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.7483296213808464, | |
| "grad_norm": 0.20148204624557856, | |
| "learning_rate": 2.3226072607260727e-05, | |
| "loss": 0.3506, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.7505567928730512, | |
| "grad_norm": 0.22456688219605642, | |
| "learning_rate": 2.3184818481848185e-05, | |
| "loss": 0.3706, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.752783964365256, | |
| "grad_norm": 0.23859155562530457, | |
| "learning_rate": 2.3143564356435647e-05, | |
| "loss": 0.3667, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.755011135857461, | |
| "grad_norm": 0.20273679068782885, | |
| "learning_rate": 2.31023102310231e-05, | |
| "loss": 0.3559, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.7572383073496658, | |
| "grad_norm": 0.23549903462162344, | |
| "learning_rate": 2.3061056105610563e-05, | |
| "loss": 0.3572, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.7594654788418709, | |
| "grad_norm": 0.20664546271924084, | |
| "learning_rate": 2.301980198019802e-05, | |
| "loss": 0.3669, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.7616926503340757, | |
| "grad_norm": 0.18573699358086376, | |
| "learning_rate": 2.297854785478548e-05, | |
| "loss": 0.3614, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.7639198218262806, | |
| "grad_norm": 0.1903401493341969, | |
| "learning_rate": 2.2937293729372938e-05, | |
| "loss": 0.3579, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.7661469933184857, | |
| "grad_norm": 0.2294105880619965, | |
| "learning_rate": 2.28960396039604e-05, | |
| "loss": 0.3821, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.7683741648106905, | |
| "grad_norm": 0.18707069115216823, | |
| "learning_rate": 2.2854785478547854e-05, | |
| "loss": 0.3464, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.7706013363028954, | |
| "grad_norm": 0.1928806368608763, | |
| "learning_rate": 2.2813531353135316e-05, | |
| "loss": 0.3471, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.7728285077951003, | |
| "grad_norm": 0.19157137046568395, | |
| "learning_rate": 2.2772277227722774e-05, | |
| "loss": 0.3484, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.7750556792873051, | |
| "grad_norm": 0.1877096266003065, | |
| "learning_rate": 2.2731023102310232e-05, | |
| "loss": 0.3427, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.77728285077951, | |
| "grad_norm": 0.23812477838664892, | |
| "learning_rate": 2.268976897689769e-05, | |
| "loss": 0.3638, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.7795100222717148, | |
| "grad_norm": 0.22681495216210937, | |
| "learning_rate": 2.264851485148515e-05, | |
| "loss": 0.3379, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.7817371937639197, | |
| "grad_norm": 0.19704186122557504, | |
| "learning_rate": 2.2607260726072607e-05, | |
| "loss": 0.3397, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.7839643652561246, | |
| "grad_norm": 0.22884211956510753, | |
| "learning_rate": 2.2566006600660065e-05, | |
| "loss": 0.3714, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.7861915367483296, | |
| "grad_norm": 0.21565251967603208, | |
| "learning_rate": 2.2524752475247527e-05, | |
| "loss": 0.3491, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.7884187082405345, | |
| "grad_norm": 0.2114423583255959, | |
| "learning_rate": 2.2483498349834982e-05, | |
| "loss": 0.3499, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.7906458797327396, | |
| "grad_norm": 0.20991719559533167, | |
| "learning_rate": 2.2442244224422444e-05, | |
| "loss": 0.342, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.7928730512249444, | |
| "grad_norm": 0.20128216471513238, | |
| "learning_rate": 2.2400990099009902e-05, | |
| "loss": 0.338, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.7951002227171493, | |
| "grad_norm": 0.213830139777128, | |
| "learning_rate": 2.2359735973597364e-05, | |
| "loss": 0.3618, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.7973273942093542, | |
| "grad_norm": 0.21259676449647513, | |
| "learning_rate": 2.231848184818482e-05, | |
| "loss": 0.3691, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.799554565701559, | |
| "grad_norm": 0.20495606063114558, | |
| "learning_rate": 2.227722772277228e-05, | |
| "loss": 0.3513, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.8017817371937639, | |
| "grad_norm": 0.22136308674013255, | |
| "learning_rate": 2.223597359735974e-05, | |
| "loss": 0.3619, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.8040089086859687, | |
| "grad_norm": 0.20024726503318607, | |
| "learning_rate": 2.2194719471947197e-05, | |
| "loss": 0.3371, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.8062360801781736, | |
| "grad_norm": 0.21344216526329354, | |
| "learning_rate": 2.2153465346534655e-05, | |
| "loss": 0.342, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.8084632516703785, | |
| "grad_norm": 0.2088397853784964, | |
| "learning_rate": 2.2112211221122113e-05, | |
| "loss": 0.3625, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.8106904231625836, | |
| "grad_norm": 0.18187149160175733, | |
| "learning_rate": 2.207095709570957e-05, | |
| "loss": 0.3346, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.8129175946547884, | |
| "grad_norm": 0.23299910264582777, | |
| "learning_rate": 2.202970297029703e-05, | |
| "loss": 0.3541, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.8151447661469933, | |
| "grad_norm": 0.20028677312315757, | |
| "learning_rate": 2.198844884488449e-05, | |
| "loss": 0.3453, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.8173719376391984, | |
| "grad_norm": 0.18263471009850116, | |
| "learning_rate": 2.1947194719471946e-05, | |
| "loss": 0.3316, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.8195991091314032, | |
| "grad_norm": 0.22062543651918515, | |
| "learning_rate": 2.1905940594059408e-05, | |
| "loss": 0.3658, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.821826280623608, | |
| "grad_norm": 0.22688623750486514, | |
| "learning_rate": 2.1864686468646866e-05, | |
| "loss": 0.3511, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.824053452115813, | |
| "grad_norm": 0.1879569517915985, | |
| "learning_rate": 2.1823432343234324e-05, | |
| "loss": 0.3217, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.8262806236080178, | |
| "grad_norm": 0.1940381066075902, | |
| "learning_rate": 2.1782178217821783e-05, | |
| "loss": 0.3419, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.8285077951002227, | |
| "grad_norm": 0.2168695271382076, | |
| "learning_rate": 2.1740924092409244e-05, | |
| "loss": 0.3529, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.8307349665924275, | |
| "grad_norm": 0.20486712169741159, | |
| "learning_rate": 2.16996699669967e-05, | |
| "loss": 0.3575, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.8329621380846324, | |
| "grad_norm": 0.1883050227368406, | |
| "learning_rate": 2.165841584158416e-05, | |
| "loss": 0.352, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.8351893095768375, | |
| "grad_norm": 0.23082829161307306, | |
| "learning_rate": 2.161716171617162e-05, | |
| "loss": 0.3663, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.8374164810690423, | |
| "grad_norm": 0.2307482347765725, | |
| "learning_rate": 2.1575907590759077e-05, | |
| "loss": 0.3544, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.8396436525612472, | |
| "grad_norm": 0.19843248597663882, | |
| "learning_rate": 2.1534653465346535e-05, | |
| "loss": 0.3569, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.8418708240534523, | |
| "grad_norm": 0.21195874697924322, | |
| "learning_rate": 2.1493399339933994e-05, | |
| "loss": 0.3645, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.8440979955456571, | |
| "grad_norm": 0.256220949643074, | |
| "learning_rate": 2.1452145214521452e-05, | |
| "loss": 0.3742, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.846325167037862, | |
| "grad_norm": 0.22055038762020165, | |
| "learning_rate": 2.141089108910891e-05, | |
| "loss": 0.348, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.8485523385300668, | |
| "grad_norm": 0.22906582912580156, | |
| "learning_rate": 2.1369636963696372e-05, | |
| "loss": 0.3366, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.8507795100222717, | |
| "grad_norm": 0.20269610390285273, | |
| "learning_rate": 2.132838283828383e-05, | |
| "loss": 0.3629, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.8530066815144766, | |
| "grad_norm": 0.20815631614968835, | |
| "learning_rate": 2.128712871287129e-05, | |
| "loss": 0.3787, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.8552338530066814, | |
| "grad_norm": 0.21411980143547962, | |
| "learning_rate": 2.1245874587458747e-05, | |
| "loss": 0.3623, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.8574610244988863, | |
| "grad_norm": 0.19020683521295328, | |
| "learning_rate": 2.120462046204621e-05, | |
| "loss": 0.3499, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.8596881959910914, | |
| "grad_norm": 0.19331331092632276, | |
| "learning_rate": 2.1163366336633663e-05, | |
| "loss": 0.3631, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.8619153674832962, | |
| "grad_norm": 0.18899933504226676, | |
| "learning_rate": 2.1122112211221125e-05, | |
| "loss": 0.3448, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.864142538975501, | |
| "grad_norm": 0.21380273552379628, | |
| "learning_rate": 2.1080858085808583e-05, | |
| "loss": 0.3489, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.8663697104677062, | |
| "grad_norm": 0.18658613921707976, | |
| "learning_rate": 2.103960396039604e-05, | |
| "loss": 0.3391, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.868596881959911, | |
| "grad_norm": 0.18984197833204985, | |
| "learning_rate": 2.09983498349835e-05, | |
| "loss": 0.3498, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.8708240534521159, | |
| "grad_norm": 0.19697181465612057, | |
| "learning_rate": 2.0957095709570958e-05, | |
| "loss": 0.341, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.8730512249443207, | |
| "grad_norm": 0.1931060808637572, | |
| "learning_rate": 2.0915841584158416e-05, | |
| "loss": 0.3513, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.8752783964365256, | |
| "grad_norm": 0.20233431207039582, | |
| "learning_rate": 2.0874587458745874e-05, | |
| "loss": 0.3662, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.8775055679287305, | |
| "grad_norm": 0.18786521609001686, | |
| "learning_rate": 2.0833333333333336e-05, | |
| "loss": 0.3614, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.8797327394209353, | |
| "grad_norm": 0.2091422737597329, | |
| "learning_rate": 2.079207920792079e-05, | |
| "loss": 0.366, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.8819599109131402, | |
| "grad_norm": 0.2150767180245874, | |
| "learning_rate": 2.0750825082508253e-05, | |
| "loss": 0.3357, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.884187082405345, | |
| "grad_norm": 0.19410541083903396, | |
| "learning_rate": 2.070957095709571e-05, | |
| "loss": 0.3532, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.8864142538975501, | |
| "grad_norm": 0.19933649104227888, | |
| "learning_rate": 2.066831683168317e-05, | |
| "loss": 0.3451, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.888641425389755, | |
| "grad_norm": 0.22397787665117305, | |
| "learning_rate": 2.0627062706270627e-05, | |
| "loss": 0.3613, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.89086859688196, | |
| "grad_norm": 0.5256275726743158, | |
| "learning_rate": 2.058580858085809e-05, | |
| "loss": 0.3539, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.893095768374165, | |
| "grad_norm": 0.19385487622928163, | |
| "learning_rate": 2.0544554455445544e-05, | |
| "loss": 0.3422, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.8953229398663698, | |
| "grad_norm": 0.24260306396119488, | |
| "learning_rate": 2.0503300330033005e-05, | |
| "loss": 0.3573, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.8975501113585747, | |
| "grad_norm": 0.20760454513198467, | |
| "learning_rate": 2.0462046204620464e-05, | |
| "loss": 0.3542, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.8997772828507795, | |
| "grad_norm": 0.19936257212107614, | |
| "learning_rate": 2.0420792079207922e-05, | |
| "loss": 0.3383, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.9020044543429844, | |
| "grad_norm": 0.19183978964833118, | |
| "learning_rate": 2.037953795379538e-05, | |
| "loss": 0.3503, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.9042316258351892, | |
| "grad_norm": 0.21365631815045058, | |
| "learning_rate": 2.033828382838284e-05, | |
| "loss": 0.3656, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.906458797327394, | |
| "grad_norm": 0.2024022437530225, | |
| "learning_rate": 2.02970297029703e-05, | |
| "loss": 0.3527, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.908685968819599, | |
| "grad_norm": 0.20226674151760932, | |
| "learning_rate": 2.0255775577557755e-05, | |
| "loss": 0.3446, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.910913140311804, | |
| "grad_norm": 0.227127328970819, | |
| "learning_rate": 2.0214521452145217e-05, | |
| "loss": 0.3536, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.913140311804009, | |
| "grad_norm": 0.2197239772687608, | |
| "learning_rate": 2.0173267326732675e-05, | |
| "loss": 0.3617, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.9153674832962138, | |
| "grad_norm": 0.22087403838276748, | |
| "learning_rate": 2.0132013201320133e-05, | |
| "loss": 0.3681, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.9175946547884188, | |
| "grad_norm": 0.1995669977450703, | |
| "learning_rate": 2.009075907590759e-05, | |
| "loss": 0.3358, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.9198218262806237, | |
| "grad_norm": 0.21117604925991532, | |
| "learning_rate": 2.0049504950495053e-05, | |
| "loss": 0.365, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.9220489977728286, | |
| "grad_norm": 0.21954207043818752, | |
| "learning_rate": 2.0008250825082508e-05, | |
| "loss": 0.3609, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.9242761692650334, | |
| "grad_norm": 0.22316141925777574, | |
| "learning_rate": 1.996699669966997e-05, | |
| "loss": 0.3527, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.9265033407572383, | |
| "grad_norm": 0.23643909690979611, | |
| "learning_rate": 1.9925742574257428e-05, | |
| "loss": 0.3547, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.9287305122494431, | |
| "grad_norm": 0.21579483005725839, | |
| "learning_rate": 1.9884488448844886e-05, | |
| "loss": 0.3411, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.930957683741648, | |
| "grad_norm": 0.21575285488649312, | |
| "learning_rate": 1.9843234323432344e-05, | |
| "loss": 0.3427, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.9331848552338529, | |
| "grad_norm": 0.22376247499518298, | |
| "learning_rate": 1.9801980198019803e-05, | |
| "loss": 0.3519, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.935412026726058, | |
| "grad_norm": 0.21499454175201782, | |
| "learning_rate": 1.976072607260726e-05, | |
| "loss": 0.3468, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.9376391982182628, | |
| "grad_norm": 0.495687391678954, | |
| "learning_rate": 1.971947194719472e-05, | |
| "loss": 0.3615, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.9398663697104677, | |
| "grad_norm": 0.20809205707497128, | |
| "learning_rate": 1.967821782178218e-05, | |
| "loss": 0.3362, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.9420935412026727, | |
| "grad_norm": 0.22896154103126082, | |
| "learning_rate": 1.9636963696369636e-05, | |
| "loss": 0.3526, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.9443207126948776, | |
| "grad_norm": 0.21096464134793458, | |
| "learning_rate": 1.9595709570957097e-05, | |
| "loss": 0.3405, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.9465478841870825, | |
| "grad_norm": 0.24066397750177496, | |
| "learning_rate": 1.9554455445544556e-05, | |
| "loss": 0.3726, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.9487750556792873, | |
| "grad_norm": 0.210675877389854, | |
| "learning_rate": 1.9513201320132014e-05, | |
| "loss": 0.3632, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.9510022271714922, | |
| "grad_norm": 0.25099975657337337, | |
| "learning_rate": 1.9471947194719472e-05, | |
| "loss": 0.3477, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.953229398663697, | |
| "grad_norm": 0.21434420701597884, | |
| "learning_rate": 1.9430693069306934e-05, | |
| "loss": 0.3519, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.955456570155902, | |
| "grad_norm": 0.22837925031767414, | |
| "learning_rate": 1.938943894389439e-05, | |
| "loss": 0.3518, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.9576837416481068, | |
| "grad_norm": 0.20704495835176406, | |
| "learning_rate": 1.934818481848185e-05, | |
| "loss": 0.3542, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.9599109131403119, | |
| "grad_norm": 0.2065597872221957, | |
| "learning_rate": 1.930693069306931e-05, | |
| "loss": 0.3588, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.9621380846325167, | |
| "grad_norm": 0.21160630744172634, | |
| "learning_rate": 1.9265676567656767e-05, | |
| "loss": 0.3493, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.9643652561247216, | |
| "grad_norm": 0.1982684867710196, | |
| "learning_rate": 1.9224422442244225e-05, | |
| "loss": 0.3717, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.9665924276169267, | |
| "grad_norm": 0.193658353418164, | |
| "learning_rate": 1.9183168316831683e-05, | |
| "loss": 0.3363, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.9688195991091315, | |
| "grad_norm": 0.23119980336159962, | |
| "learning_rate": 1.9141914191419145e-05, | |
| "loss": 0.3672, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.9710467706013364, | |
| "grad_norm": 0.20944985808577066, | |
| "learning_rate": 1.91006600660066e-05, | |
| "loss": 0.3763, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.9732739420935412, | |
| "grad_norm": 0.21358766726060044, | |
| "learning_rate": 1.905940594059406e-05, | |
| "loss": 0.3584, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.975501113585746, | |
| "grad_norm": 0.20776733067310027, | |
| "learning_rate": 1.901815181518152e-05, | |
| "loss": 0.3547, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.977728285077951, | |
| "grad_norm": 0.20491175737408723, | |
| "learning_rate": 1.8976897689768978e-05, | |
| "loss": 0.3471, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.9799554565701558, | |
| "grad_norm": 0.20282014026788026, | |
| "learning_rate": 1.8935643564356436e-05, | |
| "loss": 0.3648, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.9821826280623607, | |
| "grad_norm": 0.2366672812165325, | |
| "learning_rate": 1.8894389438943898e-05, | |
| "loss": 0.3557, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.9844097995545658, | |
| "grad_norm": 0.22040321762718917, | |
| "learning_rate": 1.8853135313531353e-05, | |
| "loss": 0.3663, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.9866369710467706, | |
| "grad_norm": 0.19386414737029778, | |
| "learning_rate": 1.8811881188118814e-05, | |
| "loss": 0.3429, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.9888641425389755, | |
| "grad_norm": 0.19057462809948442, | |
| "learning_rate": 1.8770627062706273e-05, | |
| "loss": 0.3277, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.9910913140311806, | |
| "grad_norm": 0.20415488056283265, | |
| "learning_rate": 1.872937293729373e-05, | |
| "loss": 0.3436, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.9933184855233854, | |
| "grad_norm": 0.20784568086315625, | |
| "learning_rate": 1.868811881188119e-05, | |
| "loss": 0.3451, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.9955456570155903, | |
| "grad_norm": 0.20041974234227464, | |
| "learning_rate": 1.8646864686468647e-05, | |
| "loss": 0.363, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.9977728285077951, | |
| "grad_norm": 0.20635848257127473, | |
| "learning_rate": 1.8605610561056106e-05, | |
| "loss": 0.3604, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.22254223937545428, | |
| "learning_rate": 1.8564356435643564e-05, | |
| "loss": 0.3253, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.002227171492205, | |
| "grad_norm": 0.2917879126350186, | |
| "learning_rate": 1.8523102310231026e-05, | |
| "loss": 0.2855, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.0044543429844097, | |
| "grad_norm": 0.2154422238702394, | |
| "learning_rate": 1.848184818481848e-05, | |
| "loss": 0.2734, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.0066815144766146, | |
| "grad_norm": 0.3199788710157364, | |
| "learning_rate": 1.8440594059405942e-05, | |
| "loss": 0.2753, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.0089086859688194, | |
| "grad_norm": 0.2564787157140946, | |
| "learning_rate": 1.83993399339934e-05, | |
| "loss": 0.2773, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.0111358574610243, | |
| "grad_norm": 0.21607906102561703, | |
| "learning_rate": 1.835808580858086e-05, | |
| "loss": 0.2644, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.0133630289532296, | |
| "grad_norm": 0.2505134364435312, | |
| "learning_rate": 1.8316831683168317e-05, | |
| "loss": 0.2655, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.0155902004454345, | |
| "grad_norm": 0.2533151837819191, | |
| "learning_rate": 1.827557755775578e-05, | |
| "loss": 0.2655, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.0178173719376393, | |
| "grad_norm": 0.21189444791251513, | |
| "learning_rate": 1.8234323432343233e-05, | |
| "loss": 0.2606, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.020044543429844, | |
| "grad_norm": 0.254765005496945, | |
| "learning_rate": 1.8193069306930695e-05, | |
| "loss": 0.2748, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.022271714922049, | |
| "grad_norm": 0.2778581181072693, | |
| "learning_rate": 1.8151815181518153e-05, | |
| "loss": 0.2832, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.024498886414254, | |
| "grad_norm": 0.23046986522656218, | |
| "learning_rate": 1.811056105610561e-05, | |
| "loss": 0.2866, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.0267260579064588, | |
| "grad_norm": 0.19649792986980408, | |
| "learning_rate": 1.806930693069307e-05, | |
| "loss": 0.2613, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.0289532293986636, | |
| "grad_norm": 0.2248761596893703, | |
| "learning_rate": 1.8028052805280528e-05, | |
| "loss": 0.2645, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.0311804008908685, | |
| "grad_norm": 0.22485773246436794, | |
| "learning_rate": 1.798679867986799e-05, | |
| "loss": 0.2548, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.0334075723830733, | |
| "grad_norm": 0.21140559547488116, | |
| "learning_rate": 1.7945544554455445e-05, | |
| "loss": 0.2687, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.035634743875278, | |
| "grad_norm": 0.23292737166191993, | |
| "learning_rate": 1.7904290429042906e-05, | |
| "loss": 0.2822, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.0378619153674835, | |
| "grad_norm": 0.23934152384126353, | |
| "learning_rate": 1.7863036303630364e-05, | |
| "loss": 0.2916, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.0400890868596884, | |
| "grad_norm": 0.20766845385270943, | |
| "learning_rate": 1.7821782178217823e-05, | |
| "loss": 0.275, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.0423162583518932, | |
| "grad_norm": 0.24248253295638, | |
| "learning_rate": 1.778052805280528e-05, | |
| "loss": 0.2693, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.044543429844098, | |
| "grad_norm": 0.23923396143066, | |
| "learning_rate": 1.7739273927392743e-05, | |
| "loss": 0.2687, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.046770601336303, | |
| "grad_norm": 0.21384650391369592, | |
| "learning_rate": 1.7698019801980197e-05, | |
| "loss": 0.2594, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.048997772828508, | |
| "grad_norm": 0.21923106830127312, | |
| "learning_rate": 1.765676567656766e-05, | |
| "loss": 0.2648, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.0512249443207127, | |
| "grad_norm": 0.21920380748549745, | |
| "learning_rate": 1.7615511551155117e-05, | |
| "loss": 0.2627, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.0534521158129175, | |
| "grad_norm": 0.221655093219312, | |
| "learning_rate": 1.7574257425742576e-05, | |
| "loss": 0.2895, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.0556792873051224, | |
| "grad_norm": 0.240989269486224, | |
| "learning_rate": 1.7533003300330034e-05, | |
| "loss": 0.286, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.0579064587973273, | |
| "grad_norm": 0.2265543567040155, | |
| "learning_rate": 1.7491749174917492e-05, | |
| "loss": 0.2652, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.060133630289532, | |
| "grad_norm": 0.6652483460142316, | |
| "learning_rate": 1.745049504950495e-05, | |
| "loss": 0.282, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.062360801781737, | |
| "grad_norm": 0.22338210010928283, | |
| "learning_rate": 1.740924092409241e-05, | |
| "loss": 0.2606, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.0645879732739423, | |
| "grad_norm": 0.2198982670309833, | |
| "learning_rate": 1.736798679867987e-05, | |
| "loss": 0.2669, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.066815144766147, | |
| "grad_norm": 0.2158641679889132, | |
| "learning_rate": 1.7326732673267325e-05, | |
| "loss": 0.2836, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.069042316258352, | |
| "grad_norm": 0.23593023008090405, | |
| "learning_rate": 1.7285478547854787e-05, | |
| "loss": 0.2796, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.071269487750557, | |
| "grad_norm": 0.2411263199249157, | |
| "learning_rate": 1.7244224422442245e-05, | |
| "loss": 0.2792, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.0734966592427617, | |
| "grad_norm": 0.2245801693901106, | |
| "learning_rate": 1.7202970297029703e-05, | |
| "loss": 0.2847, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.0757238307349666, | |
| "grad_norm": 0.2196777801290482, | |
| "learning_rate": 1.716171617161716e-05, | |
| "loss": 0.2721, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.0779510022271714, | |
| "grad_norm": 0.21503454059486582, | |
| "learning_rate": 1.7120462046204623e-05, | |
| "loss": 0.2577, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.0801781737193763, | |
| "grad_norm": 0.22247608374544567, | |
| "learning_rate": 1.707920792079208e-05, | |
| "loss": 0.2743, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.082405345211581, | |
| "grad_norm": 0.2181193004044087, | |
| "learning_rate": 1.703795379537954e-05, | |
| "loss": 0.2724, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.084632516703786, | |
| "grad_norm": 0.22746691098548233, | |
| "learning_rate": 1.6996699669966998e-05, | |
| "loss": 0.2796, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.086859688195991, | |
| "grad_norm": 0.19996305587329172, | |
| "learning_rate": 1.6955445544554456e-05, | |
| "loss": 0.2595, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.089086859688196, | |
| "grad_norm": 0.20563128154362556, | |
| "learning_rate": 1.6914191419141915e-05, | |
| "loss": 0.2652, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.091314031180401, | |
| "grad_norm": 0.2199513127201024, | |
| "learning_rate": 1.6872937293729373e-05, | |
| "loss": 0.2907, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.093541202672606, | |
| "grad_norm": 0.219015689133294, | |
| "learning_rate": 1.6831683168316834e-05, | |
| "loss": 0.2844, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.0957683741648108, | |
| "grad_norm": 0.19858942867264778, | |
| "learning_rate": 1.679042904290429e-05, | |
| "loss": 0.2768, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.0979955456570156, | |
| "grad_norm": 0.19938626187858713, | |
| "learning_rate": 1.674917491749175e-05, | |
| "loss": 0.2683, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.1002227171492205, | |
| "grad_norm": 0.19250255807127986, | |
| "learning_rate": 1.670792079207921e-05, | |
| "loss": 0.2515, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.1024498886414253, | |
| "grad_norm": 0.20353899319399785, | |
| "learning_rate": 1.6666666666666667e-05, | |
| "loss": 0.2605, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.10467706013363, | |
| "grad_norm": 0.21472333914350644, | |
| "learning_rate": 1.6625412541254126e-05, | |
| "loss": 0.2594, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.106904231625835, | |
| "grad_norm": 0.19331389223832582, | |
| "learning_rate": 1.6584158415841587e-05, | |
| "loss": 0.2779, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.10913140311804, | |
| "grad_norm": 0.20336569846190305, | |
| "learning_rate": 1.6542904290429042e-05, | |
| "loss": 0.279, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.111358574610245, | |
| "grad_norm": 0.21346116075505495, | |
| "learning_rate": 1.6501650165016504e-05, | |
| "loss": 0.2842, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.11358574610245, | |
| "grad_norm": 0.19001897728741188, | |
| "learning_rate": 1.6460396039603962e-05, | |
| "loss": 0.2627, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.115812917594655, | |
| "grad_norm": 0.21819996809302278, | |
| "learning_rate": 1.641914191419142e-05, | |
| "loss": 0.2733, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.11804008908686, | |
| "grad_norm": 0.1927124136398043, | |
| "learning_rate": 1.637788778877888e-05, | |
| "loss": 0.2734, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.1202672605790647, | |
| "grad_norm": 0.20472312874405912, | |
| "learning_rate": 1.6336633663366337e-05, | |
| "loss": 0.2704, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.1224944320712695, | |
| "grad_norm": 0.19355755128512284, | |
| "learning_rate": 1.6295379537953795e-05, | |
| "loss": 0.2652, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.1247216035634744, | |
| "grad_norm": 0.2323231078698614, | |
| "learning_rate": 1.6254125412541253e-05, | |
| "loss": 0.2987, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.1269487750556793, | |
| "grad_norm": 0.20626477328090762, | |
| "learning_rate": 1.6212871287128715e-05, | |
| "loss": 0.2719, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.129175946547884, | |
| "grad_norm": 0.21231036913203105, | |
| "learning_rate": 1.617161716171617e-05, | |
| "loss": 0.279, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.131403118040089, | |
| "grad_norm": 0.20467332985866407, | |
| "learning_rate": 1.613036303630363e-05, | |
| "loss": 0.2759, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.133630289532294, | |
| "grad_norm": 0.1887795320491789, | |
| "learning_rate": 1.608910891089109e-05, | |
| "loss": 0.2754, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.1358574610244987, | |
| "grad_norm": 0.1965487030780761, | |
| "learning_rate": 1.604785478547855e-05, | |
| "loss": 0.2598, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.138084632516704, | |
| "grad_norm": 0.19578496963005818, | |
| "learning_rate": 1.6006600660066006e-05, | |
| "loss": 0.2726, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.140311804008909, | |
| "grad_norm": 0.1945963650282771, | |
| "learning_rate": 1.5965346534653468e-05, | |
| "loss": 0.2773, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.1425389755011137, | |
| "grad_norm": 0.19656389161987292, | |
| "learning_rate": 1.5924092409240926e-05, | |
| "loss": 0.2711, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.1447661469933186, | |
| "grad_norm": 0.2632692695136681, | |
| "learning_rate": 1.5882838283828385e-05, | |
| "loss": 0.2865, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.1469933184855234, | |
| "grad_norm": 0.20089943924523118, | |
| "learning_rate": 1.5841584158415843e-05, | |
| "loss": 0.2787, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.1492204899777283, | |
| "grad_norm": 0.2059781904386693, | |
| "learning_rate": 1.58003300330033e-05, | |
| "loss": 0.2611, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.151447661469933, | |
| "grad_norm": 0.20264777332725245, | |
| "learning_rate": 1.575907590759076e-05, | |
| "loss": 0.2717, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.153674832962138, | |
| "grad_norm": 0.1899938605683298, | |
| "learning_rate": 1.5717821782178218e-05, | |
| "loss": 0.245, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.155902004454343, | |
| "grad_norm": 0.22255692718026238, | |
| "learning_rate": 1.567656765676568e-05, | |
| "loss": 0.2667, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.1581291759465477, | |
| "grad_norm": 0.22124431095213815, | |
| "learning_rate": 1.5635313531353134e-05, | |
| "loss": 0.2711, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.1603563474387526, | |
| "grad_norm": 0.19003905428878046, | |
| "learning_rate": 1.5594059405940596e-05, | |
| "loss": 0.258, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.1625835189309575, | |
| "grad_norm": 0.23522626395657878, | |
| "learning_rate": 1.5552805280528054e-05, | |
| "loss": 0.2757, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.1648106904231628, | |
| "grad_norm": 0.20825230040823375, | |
| "learning_rate": 1.5511551155115512e-05, | |
| "loss": 0.2798, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.1670378619153676, | |
| "grad_norm": 0.1914440301183656, | |
| "learning_rate": 1.547029702970297e-05, | |
| "loss": 0.2462, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.1692650334075725, | |
| "grad_norm": 0.19981127087807474, | |
| "learning_rate": 1.5429042904290432e-05, | |
| "loss": 0.2927, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.1714922048997773, | |
| "grad_norm": 0.1894361460402151, | |
| "learning_rate": 1.5387788778877887e-05, | |
| "loss": 0.2503, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.173719376391982, | |
| "grad_norm": 0.22245929757102517, | |
| "learning_rate": 1.534653465346535e-05, | |
| "loss": 0.2685, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.175946547884187, | |
| "grad_norm": 0.19330615950838612, | |
| "learning_rate": 1.5305280528052807e-05, | |
| "loss": 0.267, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.178173719376392, | |
| "grad_norm": 0.19932638559655444, | |
| "learning_rate": 1.5264026402640265e-05, | |
| "loss": 0.2598, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.180400890868597, | |
| "grad_norm": 0.21889869568522272, | |
| "learning_rate": 1.5222772277227723e-05, | |
| "loss": 0.2611, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.1826280623608016, | |
| "grad_norm": 0.19773355190252426, | |
| "learning_rate": 1.5181518151815183e-05, | |
| "loss": 0.2722, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.1848552338530065, | |
| "grad_norm": 0.22056458426364567, | |
| "learning_rate": 1.514026402640264e-05, | |
| "loss": 0.2885, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.187082405345212, | |
| "grad_norm": 0.2113264331065395, | |
| "learning_rate": 1.50990099009901e-05, | |
| "loss": 0.2855, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.1893095768374167, | |
| "grad_norm": 0.19545727618324243, | |
| "learning_rate": 1.5057755775577558e-05, | |
| "loss": 0.2691, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.1915367483296215, | |
| "grad_norm": 0.20344543295711154, | |
| "learning_rate": 1.5016501650165018e-05, | |
| "loss": 0.2768, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.1937639198218264, | |
| "grad_norm": 0.2050600328409546, | |
| "learning_rate": 1.4975247524752475e-05, | |
| "loss": 0.2662, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.1959910913140313, | |
| "grad_norm": 0.1995231599297292, | |
| "learning_rate": 1.4933993399339935e-05, | |
| "loss": 0.2756, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.198218262806236, | |
| "grad_norm": 0.21638732107895453, | |
| "learning_rate": 1.4892739273927395e-05, | |
| "loss": 0.2845, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.200445434298441, | |
| "grad_norm": 0.5148923073947419, | |
| "learning_rate": 1.4851485148514851e-05, | |
| "loss": 0.2676, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.202672605790646, | |
| "grad_norm": 0.1949433992776448, | |
| "learning_rate": 1.4810231023102311e-05, | |
| "loss": 0.2676, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.2048997772828507, | |
| "grad_norm": 0.21378970538937492, | |
| "learning_rate": 1.4768976897689771e-05, | |
| "loss": 0.2745, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.2071269487750556, | |
| "grad_norm": 0.21371986128888149, | |
| "learning_rate": 1.4727722772277228e-05, | |
| "loss": 0.2634, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.2093541202672604, | |
| "grad_norm": 0.20102771902186908, | |
| "learning_rate": 1.4686468646864688e-05, | |
| "loss": 0.269, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.2115812917594653, | |
| "grad_norm": 0.20157010634064784, | |
| "learning_rate": 1.4645214521452147e-05, | |
| "loss": 0.2755, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.2138084632516706, | |
| "grad_norm": 0.22829795778649614, | |
| "learning_rate": 1.4603960396039604e-05, | |
| "loss": 0.2576, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.2160356347438754, | |
| "grad_norm": 0.20055620976746705, | |
| "learning_rate": 1.4562706270627064e-05, | |
| "loss": 0.2593, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.2182628062360803, | |
| "grad_norm": 0.22009126881477245, | |
| "learning_rate": 1.4521452145214522e-05, | |
| "loss": 0.2931, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.220489977728285, | |
| "grad_norm": 0.21589312179945735, | |
| "learning_rate": 1.448019801980198e-05, | |
| "loss": 0.2791, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.22271714922049, | |
| "grad_norm": 0.2107323668966019, | |
| "learning_rate": 1.4438943894389439e-05, | |
| "loss": 0.266, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.224944320712695, | |
| "grad_norm": 0.21192905027945474, | |
| "learning_rate": 1.4397689768976899e-05, | |
| "loss": 0.2679, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.2271714922048997, | |
| "grad_norm": 0.20176617353427523, | |
| "learning_rate": 1.4356435643564355e-05, | |
| "loss": 0.2894, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.2293986636971046, | |
| "grad_norm": 0.20379631413029126, | |
| "learning_rate": 1.4315181518151815e-05, | |
| "loss": 0.2545, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.2316258351893095, | |
| "grad_norm": 0.2042103186102569, | |
| "learning_rate": 1.4273927392739275e-05, | |
| "loss": 0.2815, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.2338530066815143, | |
| "grad_norm": 0.21761471948192412, | |
| "learning_rate": 1.4232673267326732e-05, | |
| "loss": 0.2702, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.236080178173719, | |
| "grad_norm": 0.20551688021741427, | |
| "learning_rate": 1.4191419141914192e-05, | |
| "loss": 0.2667, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.2383073496659245, | |
| "grad_norm": 0.2220623508747115, | |
| "learning_rate": 1.4150165016501652e-05, | |
| "loss": 0.2953, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.2405345211581293, | |
| "grad_norm": 0.19576050778193377, | |
| "learning_rate": 1.4108910891089108e-05, | |
| "loss": 0.2767, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.242761692650334, | |
| "grad_norm": 0.20687710009830737, | |
| "learning_rate": 1.4067656765676568e-05, | |
| "loss": 0.2663, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.244988864142539, | |
| "grad_norm": 0.21049378082401707, | |
| "learning_rate": 1.4026402640264028e-05, | |
| "loss": 0.2663, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.247216035634744, | |
| "grad_norm": 0.19273517672626037, | |
| "learning_rate": 1.3985148514851486e-05, | |
| "loss": 0.2714, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.249443207126949, | |
| "grad_norm": 0.19519934516938037, | |
| "learning_rate": 1.3943894389438945e-05, | |
| "loss": 0.2729, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.2516703786191536, | |
| "grad_norm": 0.2067756058579799, | |
| "learning_rate": 1.3902640264026403e-05, | |
| "loss": 0.2784, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.2538975501113585, | |
| "grad_norm": 0.19889505929302467, | |
| "learning_rate": 1.3861386138613863e-05, | |
| "loss": 0.2604, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.2561247216035634, | |
| "grad_norm": 0.2022726860047395, | |
| "learning_rate": 1.382013201320132e-05, | |
| "loss": 0.2866, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.2583518930957682, | |
| "grad_norm": 0.21628682874250826, | |
| "learning_rate": 1.377887788778878e-05, | |
| "loss": 0.2783, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.260579064587973, | |
| "grad_norm": 0.22534223881440016, | |
| "learning_rate": 1.373762376237624e-05, | |
| "loss": 0.2655, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.262806236080178, | |
| "grad_norm": 0.2237062248627211, | |
| "learning_rate": 1.3696369636963696e-05, | |
| "loss": 0.2752, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.2650334075723833, | |
| "grad_norm": 0.19796277865121822, | |
| "learning_rate": 1.3655115511551156e-05, | |
| "loss": 0.2841, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.267260579064588, | |
| "grad_norm": 0.22580292007775335, | |
| "learning_rate": 1.3613861386138616e-05, | |
| "loss": 0.282, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.269487750556793, | |
| "grad_norm": 0.23063814482354972, | |
| "learning_rate": 1.3572607260726072e-05, | |
| "loss": 0.2686, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.271714922048998, | |
| "grad_norm": 0.22712325781289536, | |
| "learning_rate": 1.3531353135313532e-05, | |
| "loss": 0.2695, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.2739420935412027, | |
| "grad_norm": 0.20320569611937253, | |
| "learning_rate": 1.3490099009900992e-05, | |
| "loss": 0.2627, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.2761692650334076, | |
| "grad_norm": 0.22639429235101766, | |
| "learning_rate": 1.3448844884488449e-05, | |
| "loss": 0.2766, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.2783964365256124, | |
| "grad_norm": 0.2149149957786946, | |
| "learning_rate": 1.3407590759075909e-05, | |
| "loss": 0.2808, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.2806236080178173, | |
| "grad_norm": 0.20731196155393639, | |
| "learning_rate": 1.3366336633663367e-05, | |
| "loss": 0.2559, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.282850779510022, | |
| "grad_norm": 0.20039508340783346, | |
| "learning_rate": 1.3325082508250825e-05, | |
| "loss": 0.2634, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.285077951002227, | |
| "grad_norm": 0.21716535787151417, | |
| "learning_rate": 1.3283828382838284e-05, | |
| "loss": 0.2649, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.2873051224944323, | |
| "grad_norm": 0.19008765090691132, | |
| "learning_rate": 1.3242574257425743e-05, | |
| "loss": 0.2655, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.289532293986637, | |
| "grad_norm": 0.21202624251215715, | |
| "learning_rate": 1.32013201320132e-05, | |
| "loss": 0.2817, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.291759465478842, | |
| "grad_norm": 0.21711447076279528, | |
| "learning_rate": 1.316006600660066e-05, | |
| "loss": 0.2757, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.293986636971047, | |
| "grad_norm": 0.2053491511971421, | |
| "learning_rate": 1.311881188118812e-05, | |
| "loss": 0.2814, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.2962138084632517, | |
| "grad_norm": 0.17966000787705147, | |
| "learning_rate": 1.3077557755775577e-05, | |
| "loss": 0.249, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.2984409799554566, | |
| "grad_norm": 0.2097272539902852, | |
| "learning_rate": 1.3036303630363036e-05, | |
| "loss": 0.2816, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.3006681514476615, | |
| "grad_norm": 0.22355137552793322, | |
| "learning_rate": 1.2995049504950496e-05, | |
| "loss": 0.2949, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.3028953229398663, | |
| "grad_norm": 0.1973752759660782, | |
| "learning_rate": 1.2953795379537956e-05, | |
| "loss": 0.2729, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.305122494432071, | |
| "grad_norm": 0.1938493711007142, | |
| "learning_rate": 1.2912541254125413e-05, | |
| "loss": 0.2748, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.307349665924276, | |
| "grad_norm": 0.18013326195522006, | |
| "learning_rate": 1.2871287128712873e-05, | |
| "loss": 0.2577, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.309576837416481, | |
| "grad_norm": 0.212280614898667, | |
| "learning_rate": 1.2830033003300331e-05, | |
| "loss": 0.2782, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.3118040089086858, | |
| "grad_norm": 0.22611659220287397, | |
| "learning_rate": 1.278877887788779e-05, | |
| "loss": 0.2817, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.3140311804008906, | |
| "grad_norm": 0.18816070651441863, | |
| "learning_rate": 1.2747524752475248e-05, | |
| "loss": 0.272, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.316258351893096, | |
| "grad_norm": 0.20816294895034484, | |
| "learning_rate": 1.2706270627062708e-05, | |
| "loss": 0.2707, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.318485523385301, | |
| "grad_norm": 0.23837725326477052, | |
| "learning_rate": 1.2665016501650164e-05, | |
| "loss": 0.2921, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.3207126948775056, | |
| "grad_norm": 0.19486477654916956, | |
| "learning_rate": 1.2623762376237624e-05, | |
| "loss": 0.2708, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.3229398663697105, | |
| "grad_norm": 0.20569445864400387, | |
| "learning_rate": 1.2582508250825084e-05, | |
| "loss": 0.2925, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.3251670378619154, | |
| "grad_norm": 0.20033809716151726, | |
| "learning_rate": 1.254125412541254e-05, | |
| "loss": 0.2689, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.3273942093541202, | |
| "grad_norm": 0.19824467122341974, | |
| "learning_rate": 1.25e-05, | |
| "loss": 0.2694, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.329621380846325, | |
| "grad_norm": 0.18892618648996176, | |
| "learning_rate": 1.2458745874587459e-05, | |
| "loss": 0.2724, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.33184855233853, | |
| "grad_norm": 0.1850645919338069, | |
| "learning_rate": 1.2417491749174919e-05, | |
| "loss": 0.2525, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.334075723830735, | |
| "grad_norm": 0.21133712560743997, | |
| "learning_rate": 1.2376237623762377e-05, | |
| "loss": 0.2827, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.33630289532294, | |
| "grad_norm": 0.18596901788858, | |
| "learning_rate": 1.2334983498349835e-05, | |
| "loss": 0.255, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.338530066815145, | |
| "grad_norm": 0.18710426212811665, | |
| "learning_rate": 1.2293729372937295e-05, | |
| "loss": 0.2571, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.34075723830735, | |
| "grad_norm": 0.2056915328832616, | |
| "learning_rate": 1.2252475247524754e-05, | |
| "loss": 0.2565, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.3429844097995547, | |
| "grad_norm": 0.21136713475340663, | |
| "learning_rate": 1.2211221122112212e-05, | |
| "loss": 0.2625, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.3452115812917596, | |
| "grad_norm": 0.19210427375018393, | |
| "learning_rate": 1.216996699669967e-05, | |
| "loss": 0.2613, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.3474387527839644, | |
| "grad_norm": 0.1980726074792816, | |
| "learning_rate": 1.2128712871287128e-05, | |
| "loss": 0.2671, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.3496659242761693, | |
| "grad_norm": 0.19016273244979234, | |
| "learning_rate": 1.2087458745874588e-05, | |
| "loss": 0.2812, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.351893095768374, | |
| "grad_norm": 0.19642715940775765, | |
| "learning_rate": 1.2046204620462047e-05, | |
| "loss": 0.2542, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.354120267260579, | |
| "grad_norm": 0.19762415084293058, | |
| "learning_rate": 1.2004950495049505e-05, | |
| "loss": 0.2678, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.356347438752784, | |
| "grad_norm": 0.1937697142736061, | |
| "learning_rate": 1.1963696369636965e-05, | |
| "loss": 0.2835, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.3585746102449887, | |
| "grad_norm": 0.20404730565329052, | |
| "learning_rate": 1.1922442244224423e-05, | |
| "loss": 0.268, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.3608017817371936, | |
| "grad_norm": 0.19264558829999212, | |
| "learning_rate": 1.1881188118811881e-05, | |
| "loss": 0.2705, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.3630289532293984, | |
| "grad_norm": 0.18612729824589969, | |
| "learning_rate": 1.1839933993399341e-05, | |
| "loss": 0.2655, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.3652561247216037, | |
| "grad_norm": 0.19184627981030214, | |
| "learning_rate": 1.17986798679868e-05, | |
| "loss": 0.2739, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.3674832962138086, | |
| "grad_norm": 0.20834509457158706, | |
| "learning_rate": 1.1757425742574258e-05, | |
| "loss": 0.2884, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.3697104677060135, | |
| "grad_norm": 0.20972582839416928, | |
| "learning_rate": 1.1716171617161718e-05, | |
| "loss": 0.2699, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.3719376391982183, | |
| "grad_norm": 0.1830305603494533, | |
| "learning_rate": 1.1674917491749176e-05, | |
| "loss": 0.2685, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.374164810690423, | |
| "grad_norm": 0.19984211566451557, | |
| "learning_rate": 1.1633663366336634e-05, | |
| "loss": 0.2777, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.376391982182628, | |
| "grad_norm": 0.19625399200236907, | |
| "learning_rate": 1.1592409240924092e-05, | |
| "loss": 0.2878, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.378619153674833, | |
| "grad_norm": 0.18837463604759586, | |
| "learning_rate": 1.155115511551155e-05, | |
| "loss": 0.2721, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.3808463251670378, | |
| "grad_norm": 0.179269880400689, | |
| "learning_rate": 1.150990099009901e-05, | |
| "loss": 0.2597, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.3830734966592426, | |
| "grad_norm": 0.17905269452030195, | |
| "learning_rate": 1.1468646864686469e-05, | |
| "loss": 0.2669, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.3853006681514475, | |
| "grad_norm": 0.18871772253331306, | |
| "learning_rate": 1.1427392739273927e-05, | |
| "loss": 0.2509, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.387527839643653, | |
| "grad_norm": 0.18408752726862213, | |
| "learning_rate": 1.1386138613861387e-05, | |
| "loss": 0.2645, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.3897550111358576, | |
| "grad_norm": 0.1931246236770583, | |
| "learning_rate": 1.1344884488448845e-05, | |
| "loss": 0.27, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.3919821826280625, | |
| "grad_norm": 0.20313530066494817, | |
| "learning_rate": 1.1303630363036304e-05, | |
| "loss": 0.2774, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.3942093541202674, | |
| "grad_norm": 0.1877854318454684, | |
| "learning_rate": 1.1262376237623764e-05, | |
| "loss": 0.2631, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.3964365256124722, | |
| "grad_norm": 0.18530096780240002, | |
| "learning_rate": 1.1221122112211222e-05, | |
| "loss": 0.254, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.398663697104677, | |
| "grad_norm": 0.18435642955392806, | |
| "learning_rate": 1.1179867986798682e-05, | |
| "loss": 0.2558, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.400890868596882, | |
| "grad_norm": 0.19898779382739942, | |
| "learning_rate": 1.113861386138614e-05, | |
| "loss": 0.288, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.403118040089087, | |
| "grad_norm": 0.2276311483736436, | |
| "learning_rate": 1.1097359735973598e-05, | |
| "loss": 0.2944, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.4053452115812917, | |
| "grad_norm": 0.18877965389508786, | |
| "learning_rate": 1.1056105610561057e-05, | |
| "loss": 0.2536, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.4075723830734965, | |
| "grad_norm": 0.21066634926735692, | |
| "learning_rate": 1.1014851485148515e-05, | |
| "loss": 0.2926, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.4097995545657014, | |
| "grad_norm": 0.18422535941666346, | |
| "learning_rate": 1.0973597359735973e-05, | |
| "loss": 0.2662, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.4120267260579062, | |
| "grad_norm": 0.19053201856845206, | |
| "learning_rate": 1.0932343234323433e-05, | |
| "loss": 0.2668, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.4142538975501115, | |
| "grad_norm": 0.18892660093681088, | |
| "learning_rate": 1.0891089108910891e-05, | |
| "loss": 0.262, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.4164810690423164, | |
| "grad_norm": 0.18924935218566152, | |
| "learning_rate": 1.084983498349835e-05, | |
| "loss": 0.2618, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.4187082405345213, | |
| "grad_norm": 0.18575274067232, | |
| "learning_rate": 1.080858085808581e-05, | |
| "loss": 0.2659, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.420935412026726, | |
| "grad_norm": 0.19770032474640944, | |
| "learning_rate": 1.0767326732673268e-05, | |
| "loss": 0.2694, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.423162583518931, | |
| "grad_norm": 0.1871465004147872, | |
| "learning_rate": 1.0726072607260726e-05, | |
| "loss": 0.2653, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.425389755011136, | |
| "grad_norm": 0.19613520681940685, | |
| "learning_rate": 1.0684818481848186e-05, | |
| "loss": 0.2596, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.4276169265033407, | |
| "grad_norm": 0.2013804064771092, | |
| "learning_rate": 1.0643564356435644e-05, | |
| "loss": 0.2744, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.4298440979955456, | |
| "grad_norm": 0.18448807360998867, | |
| "learning_rate": 1.0602310231023104e-05, | |
| "loss": 0.2739, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.4320712694877504, | |
| "grad_norm": 0.17436121469164215, | |
| "learning_rate": 1.0561056105610562e-05, | |
| "loss": 0.2556, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.4342984409799553, | |
| "grad_norm": 0.20230038331757244, | |
| "learning_rate": 1.051980198019802e-05, | |
| "loss": 0.2775, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.4365256124721606, | |
| "grad_norm": 0.19739011178754348, | |
| "learning_rate": 1.0478547854785479e-05, | |
| "loss": 0.2699, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.4387527839643655, | |
| "grad_norm": 0.19593366852721233, | |
| "learning_rate": 1.0437293729372937e-05, | |
| "loss": 0.2626, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.4409799554565703, | |
| "grad_norm": 0.20418637427016842, | |
| "learning_rate": 1.0396039603960395e-05, | |
| "loss": 0.2766, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.443207126948775, | |
| "grad_norm": 0.1787939591686354, | |
| "learning_rate": 1.0354785478547855e-05, | |
| "loss": 0.2552, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.44543429844098, | |
| "grad_norm": 0.1922023637484877, | |
| "learning_rate": 1.0313531353135314e-05, | |
| "loss": 0.2639, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.447661469933185, | |
| "grad_norm": 0.18990449404250637, | |
| "learning_rate": 1.0272277227722772e-05, | |
| "loss": 0.2554, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.4498886414253898, | |
| "grad_norm": 0.19231913714571738, | |
| "learning_rate": 1.0231023102310232e-05, | |
| "loss": 0.2703, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.4521158129175946, | |
| "grad_norm": 0.18439832863625685, | |
| "learning_rate": 1.018976897689769e-05, | |
| "loss": 0.2708, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.4543429844097995, | |
| "grad_norm": 0.1825082756343376, | |
| "learning_rate": 1.014851485148515e-05, | |
| "loss": 0.2663, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.4565701559020043, | |
| "grad_norm": 0.19271366095212672, | |
| "learning_rate": 1.0107260726072608e-05, | |
| "loss": 0.2928, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.458797327394209, | |
| "grad_norm": 0.1929337738803978, | |
| "learning_rate": 1.0066006600660067e-05, | |
| "loss": 0.2701, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.461024498886414, | |
| "grad_norm": 0.18674110929001914, | |
| "learning_rate": 1.0024752475247527e-05, | |
| "loss": 0.2718, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.463251670378619, | |
| "grad_norm": 0.1742958795479238, | |
| "learning_rate": 9.983498349834985e-06, | |
| "loss": 0.2607, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.4654788418708242, | |
| "grad_norm": 0.19124442287313453, | |
| "learning_rate": 9.942244224422443e-06, | |
| "loss": 0.2841, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.467706013363029, | |
| "grad_norm": 0.17466533749128837, | |
| "learning_rate": 9.900990099009901e-06, | |
| "loss": 0.2569, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.469933184855234, | |
| "grad_norm": 0.18291754233017415, | |
| "learning_rate": 9.85973597359736e-06, | |
| "loss": 0.2658, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.472160356347439, | |
| "grad_norm": 0.18788097066997822, | |
| "learning_rate": 9.818481848184818e-06, | |
| "loss": 0.271, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.4743875278396437, | |
| "grad_norm": 0.19428141706955826, | |
| "learning_rate": 9.777227722772278e-06, | |
| "loss": 0.2794, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.4766146993318485, | |
| "grad_norm": 0.19910230370842083, | |
| "learning_rate": 9.735973597359736e-06, | |
| "loss": 0.2833, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.4788418708240534, | |
| "grad_norm": 0.19683763848878325, | |
| "learning_rate": 9.694719471947194e-06, | |
| "loss": 0.2785, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.4810690423162582, | |
| "grad_norm": 0.19801756868357392, | |
| "learning_rate": 9.653465346534654e-06, | |
| "loss": 0.2719, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.483296213808463, | |
| "grad_norm": 0.1826730168645165, | |
| "learning_rate": 9.612211221122113e-06, | |
| "loss": 0.2584, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.485523385300668, | |
| "grad_norm": 0.19696852384395833, | |
| "learning_rate": 9.570957095709572e-06, | |
| "loss": 0.2722, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.4877505567928733, | |
| "grad_norm": 0.18950166276846098, | |
| "learning_rate": 9.52970297029703e-06, | |
| "loss": 0.2706, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.489977728285078, | |
| "grad_norm": 0.1844705006714394, | |
| "learning_rate": 9.488448844884489e-06, | |
| "loss": 0.2563, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.492204899777283, | |
| "grad_norm": 0.19868072718137894, | |
| "learning_rate": 9.447194719471949e-06, | |
| "loss": 0.2684, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.494432071269488, | |
| "grad_norm": 0.2066675942912551, | |
| "learning_rate": 9.405940594059407e-06, | |
| "loss": 0.2815, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.4966592427616927, | |
| "grad_norm": 0.20088816875352536, | |
| "learning_rate": 9.364686468646865e-06, | |
| "loss": 0.2746, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.4988864142538976, | |
| "grad_norm": 0.21141165012413876, | |
| "learning_rate": 9.323432343234324e-06, | |
| "loss": 0.2738, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.5011135857461024, | |
| "grad_norm": 0.19699453704906772, | |
| "learning_rate": 9.282178217821782e-06, | |
| "loss": 0.27, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.5033407572383073, | |
| "grad_norm": 0.184869259863741, | |
| "learning_rate": 9.24092409240924e-06, | |
| "loss": 0.2629, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.505567928730512, | |
| "grad_norm": 0.20304155160431278, | |
| "learning_rate": 9.1996699669967e-06, | |
| "loss": 0.2603, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.507795100222717, | |
| "grad_norm": 0.21305764778554145, | |
| "learning_rate": 9.158415841584158e-06, | |
| "loss": 0.2747, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.510022271714922, | |
| "grad_norm": 0.22852912068589257, | |
| "learning_rate": 9.117161716171617e-06, | |
| "loss": 0.2725, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.5122494432071267, | |
| "grad_norm": 0.19848187457065516, | |
| "learning_rate": 9.075907590759077e-06, | |
| "loss": 0.2808, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.5144766146993316, | |
| "grad_norm": 0.1913588513644944, | |
| "learning_rate": 9.034653465346535e-06, | |
| "loss": 0.2631, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.516703786191537, | |
| "grad_norm": 0.40151960277216947, | |
| "learning_rate": 8.993399339933995e-06, | |
| "loss": 0.2795, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.5189309576837418, | |
| "grad_norm": 0.20250778837370625, | |
| "learning_rate": 8.952145214521453e-06, | |
| "loss": 0.2801, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 2.5211581291759466, | |
| "grad_norm": 0.20334890585637636, | |
| "learning_rate": 8.910891089108911e-06, | |
| "loss": 0.2813, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 2.5233853006681515, | |
| "grad_norm": 0.18704973977774514, | |
| "learning_rate": 8.869636963696371e-06, | |
| "loss": 0.2719, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 2.5256124721603563, | |
| "grad_norm": 0.19467440557527957, | |
| "learning_rate": 8.82838283828383e-06, | |
| "loss": 0.272, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 2.527839643652561, | |
| "grad_norm": 0.2008853859089321, | |
| "learning_rate": 8.787128712871288e-06, | |
| "loss": 0.286, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 2.530066815144766, | |
| "grad_norm": 0.19458679714398816, | |
| "learning_rate": 8.745874587458746e-06, | |
| "loss": 0.265, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 2.532293986636971, | |
| "grad_norm": 0.20465986625053167, | |
| "learning_rate": 8.704620462046204e-06, | |
| "loss": 0.2813, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 2.534521158129176, | |
| "grad_norm": 0.1895991283920018, | |
| "learning_rate": 8.663366336633663e-06, | |
| "loss": 0.2659, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 2.536748329621381, | |
| "grad_norm": 0.1879019267908632, | |
| "learning_rate": 8.622112211221123e-06, | |
| "loss": 0.2705, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 2.538975501113586, | |
| "grad_norm": 0.19743356419796973, | |
| "learning_rate": 8.58085808580858e-06, | |
| "loss": 0.2704, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.541202672605791, | |
| "grad_norm": 0.1883376854672621, | |
| "learning_rate": 8.53960396039604e-06, | |
| "loss": 0.275, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 2.5434298440979957, | |
| "grad_norm": 0.17992466715149383, | |
| "learning_rate": 8.498349834983499e-06, | |
| "loss": 0.2651, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 2.5456570155902005, | |
| "grad_norm": 0.17938120420166848, | |
| "learning_rate": 8.457095709570957e-06, | |
| "loss": 0.2763, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 2.5478841870824054, | |
| "grad_norm": 0.1766464605464754, | |
| "learning_rate": 8.415841584158417e-06, | |
| "loss": 0.2814, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 2.5501113585746102, | |
| "grad_norm": 0.1733272025991543, | |
| "learning_rate": 8.374587458745875e-06, | |
| "loss": 0.2505, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 2.552338530066815, | |
| "grad_norm": 0.19171087486125246, | |
| "learning_rate": 8.333333333333334e-06, | |
| "loss": 0.2669, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 2.55456570155902, | |
| "grad_norm": 0.20113910144003733, | |
| "learning_rate": 8.292079207920794e-06, | |
| "loss": 0.2743, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 2.556792873051225, | |
| "grad_norm": 0.188959196450275, | |
| "learning_rate": 8.250825082508252e-06, | |
| "loss": 0.2844, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 2.5590200445434297, | |
| "grad_norm": 0.1787093466409625, | |
| "learning_rate": 8.20957095709571e-06, | |
| "loss": 0.2821, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 2.5612472160356345, | |
| "grad_norm": 0.1942401995026469, | |
| "learning_rate": 8.168316831683168e-06, | |
| "loss": 0.2829, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.5634743875278394, | |
| "grad_norm": 0.19634490928315082, | |
| "learning_rate": 8.127062706270627e-06, | |
| "loss": 0.2826, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 2.5657015590200447, | |
| "grad_norm": 0.19079365604694734, | |
| "learning_rate": 8.085808580858085e-06, | |
| "loss": 0.2671, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 2.5679287305122496, | |
| "grad_norm": 0.18625347494470584, | |
| "learning_rate": 8.044554455445545e-06, | |
| "loss": 0.2704, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 2.5701559020044544, | |
| "grad_norm": 0.19152249857310585, | |
| "learning_rate": 8.003300330033003e-06, | |
| "loss": 0.2607, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 2.5723830734966593, | |
| "grad_norm": 0.21034174122909713, | |
| "learning_rate": 7.962046204620463e-06, | |
| "loss": 0.2776, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 2.574610244988864, | |
| "grad_norm": 0.19270298450827533, | |
| "learning_rate": 7.920792079207921e-06, | |
| "loss": 0.2633, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 2.576837416481069, | |
| "grad_norm": 0.19418933010831285, | |
| "learning_rate": 7.87953795379538e-06, | |
| "loss": 0.2819, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 2.579064587973274, | |
| "grad_norm": 0.1774967603891648, | |
| "learning_rate": 7.83828382838284e-06, | |
| "loss": 0.268, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 2.5812917594654787, | |
| "grad_norm": 0.1952783384290122, | |
| "learning_rate": 7.797029702970298e-06, | |
| "loss": 0.2714, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 2.5835189309576836, | |
| "grad_norm": 0.18333390666789304, | |
| "learning_rate": 7.755775577557756e-06, | |
| "loss": 0.2542, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.585746102449889, | |
| "grad_norm": 0.1915948045097857, | |
| "learning_rate": 7.714521452145216e-06, | |
| "loss": 0.2665, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 2.5879732739420938, | |
| "grad_norm": 0.18672447338790377, | |
| "learning_rate": 7.673267326732674e-06, | |
| "loss": 0.2584, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 2.5902004454342986, | |
| "grad_norm": 0.1829500342139928, | |
| "learning_rate": 7.632013201320133e-06, | |
| "loss": 0.2745, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 2.5924276169265035, | |
| "grad_norm": 0.18799104968969643, | |
| "learning_rate": 7.590759075907592e-06, | |
| "loss": 0.2627, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 2.5946547884187083, | |
| "grad_norm": 0.19458221392506975, | |
| "learning_rate": 7.54950495049505e-06, | |
| "loss": 0.2762, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 2.596881959910913, | |
| "grad_norm": 0.191352735133007, | |
| "learning_rate": 7.508250825082509e-06, | |
| "loss": 0.281, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 2.599109131403118, | |
| "grad_norm": 0.17408507908431892, | |
| "learning_rate": 7.466996699669967e-06, | |
| "loss": 0.2612, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 2.601336302895323, | |
| "grad_norm": 0.186054208066729, | |
| "learning_rate": 7.4257425742574256e-06, | |
| "loss": 0.2736, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 2.6035634743875278, | |
| "grad_norm": 0.18473369451560104, | |
| "learning_rate": 7.3844884488448855e-06, | |
| "loss": 0.266, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 2.6057906458797326, | |
| "grad_norm": 0.1938711489246747, | |
| "learning_rate": 7.343234323432344e-06, | |
| "loss": 0.2637, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.6080178173719375, | |
| "grad_norm": 0.19020930250626525, | |
| "learning_rate": 7.301980198019802e-06, | |
| "loss": 0.276, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 2.6102449888641424, | |
| "grad_norm": 0.1902014920990745, | |
| "learning_rate": 7.260726072607261e-06, | |
| "loss": 0.2931, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 2.612472160356347, | |
| "grad_norm": 0.20235613156985066, | |
| "learning_rate": 7.219471947194719e-06, | |
| "loss": 0.2788, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 2.614699331848552, | |
| "grad_norm": 0.1904905450934217, | |
| "learning_rate": 7.178217821782178e-06, | |
| "loss": 0.2813, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 2.6169265033407574, | |
| "grad_norm": 0.1829474505593454, | |
| "learning_rate": 7.136963696369638e-06, | |
| "loss": 0.2565, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 2.6191536748329622, | |
| "grad_norm": 0.18034492440862052, | |
| "learning_rate": 7.095709570957096e-06, | |
| "loss": 0.262, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 2.621380846325167, | |
| "grad_norm": 0.18602311086032206, | |
| "learning_rate": 7.054455445544554e-06, | |
| "loss": 0.277, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 2.623608017817372, | |
| "grad_norm": 0.1862942706304272, | |
| "learning_rate": 7.013201320132014e-06, | |
| "loss": 0.2583, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 2.625835189309577, | |
| "grad_norm": 0.21097629481431587, | |
| "learning_rate": 6.971947194719472e-06, | |
| "loss": 0.274, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 2.6280623608017817, | |
| "grad_norm": 0.19480593973142524, | |
| "learning_rate": 6.9306930693069314e-06, | |
| "loss": 0.2744, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.6302895322939865, | |
| "grad_norm": 0.20665384592439903, | |
| "learning_rate": 6.88943894389439e-06, | |
| "loss": 0.2809, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 2.6325167037861914, | |
| "grad_norm": 0.20673361836900345, | |
| "learning_rate": 6.848184818481848e-06, | |
| "loss": 0.2816, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 2.6347438752783967, | |
| "grad_norm": 0.19227285073839473, | |
| "learning_rate": 6.806930693069308e-06, | |
| "loss": 0.2628, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 2.6369710467706016, | |
| "grad_norm": 0.19492100553812766, | |
| "learning_rate": 6.765676567656766e-06, | |
| "loss": 0.2465, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 2.6391982182628064, | |
| "grad_norm": 0.1800432051918627, | |
| "learning_rate": 6.724422442244224e-06, | |
| "loss": 0.2554, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 2.6414253897550113, | |
| "grad_norm": 0.18423128116751586, | |
| "learning_rate": 6.6831683168316835e-06, | |
| "loss": 0.2768, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 2.643652561247216, | |
| "grad_norm": 0.18976438520324423, | |
| "learning_rate": 6.641914191419142e-06, | |
| "loss": 0.2685, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 2.645879732739421, | |
| "grad_norm": 0.1782736610121485, | |
| "learning_rate": 6.6006600660066e-06, | |
| "loss": 0.2491, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 2.648106904231626, | |
| "grad_norm": 0.18786731467851306, | |
| "learning_rate": 6.55940594059406e-06, | |
| "loss": 0.2634, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 2.6503340757238307, | |
| "grad_norm": 0.17288807093480227, | |
| "learning_rate": 6.518151815181518e-06, | |
| "loss": 0.2666, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.6525612472160356, | |
| "grad_norm": 0.18393738485159578, | |
| "learning_rate": 6.476897689768978e-06, | |
| "loss": 0.2551, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 2.6547884187082404, | |
| "grad_norm": 0.18030281904212597, | |
| "learning_rate": 6.4356435643564364e-06, | |
| "loss": 0.2658, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 2.6570155902004453, | |
| "grad_norm": 0.1827429575224797, | |
| "learning_rate": 6.394389438943895e-06, | |
| "loss": 0.2722, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 2.65924276169265, | |
| "grad_norm": 0.17988767728703248, | |
| "learning_rate": 6.353135313531354e-06, | |
| "loss": 0.2649, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 2.661469933184855, | |
| "grad_norm": 0.1747455257273141, | |
| "learning_rate": 6.311881188118812e-06, | |
| "loss": 0.2609, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 2.66369710467706, | |
| "grad_norm": 0.1890842571379513, | |
| "learning_rate": 6.27062706270627e-06, | |
| "loss": 0.2836, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 2.665924276169265, | |
| "grad_norm": 0.21595638844541948, | |
| "learning_rate": 6.2293729372937294e-06, | |
| "loss": 0.2995, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 2.66815144766147, | |
| "grad_norm": 0.17746447661036516, | |
| "learning_rate": 6.1881188118811885e-06, | |
| "loss": 0.2693, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 2.670378619153675, | |
| "grad_norm": 0.17735444851193852, | |
| "learning_rate": 6.146864686468648e-06, | |
| "loss": 0.2723, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 2.6726057906458798, | |
| "grad_norm": 0.18981324382870987, | |
| "learning_rate": 6.105610561056106e-06, | |
| "loss": 0.2752, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.6748329621380846, | |
| "grad_norm": 0.18729354512349214, | |
| "learning_rate": 6.064356435643564e-06, | |
| "loss": 0.2645, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 2.6770601336302895, | |
| "grad_norm": 0.18309412763714775, | |
| "learning_rate": 6.023102310231023e-06, | |
| "loss": 0.2702, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 2.6792873051224944, | |
| "grad_norm": 0.18164682701271706, | |
| "learning_rate": 5.981848184818482e-06, | |
| "loss": 0.2635, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 2.681514476614699, | |
| "grad_norm": 0.1899876797983057, | |
| "learning_rate": 5.940594059405941e-06, | |
| "loss": 0.2766, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 2.683741648106904, | |
| "grad_norm": 0.1827526700599873, | |
| "learning_rate": 5.8993399339934e-06, | |
| "loss": 0.2671, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 2.6859688195991094, | |
| "grad_norm": 0.19843184360259508, | |
| "learning_rate": 5.858085808580859e-06, | |
| "loss": 0.2805, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 2.6881959910913142, | |
| "grad_norm": 0.17624294896059342, | |
| "learning_rate": 5.816831683168317e-06, | |
| "loss": 0.2674, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 2.690423162583519, | |
| "grad_norm": 0.19016213300099563, | |
| "learning_rate": 5.775577557755775e-06, | |
| "loss": 0.2963, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 2.692650334075724, | |
| "grad_norm": 0.1917312001534066, | |
| "learning_rate": 5.7343234323432344e-06, | |
| "loss": 0.2818, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 2.694877505567929, | |
| "grad_norm": 0.1798024925266007, | |
| "learning_rate": 5.6930693069306936e-06, | |
| "loss": 0.2589, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.6971046770601337, | |
| "grad_norm": 0.20136922752025474, | |
| "learning_rate": 5.651815181518152e-06, | |
| "loss": 0.2811, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 2.6993318485523385, | |
| "grad_norm": 0.18163309652399173, | |
| "learning_rate": 5.610561056105611e-06, | |
| "loss": 0.2856, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 2.7015590200445434, | |
| "grad_norm": 0.18234375247944756, | |
| "learning_rate": 5.56930693069307e-06, | |
| "loss": 0.2602, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 2.7037861915367483, | |
| "grad_norm": 0.17889900699377081, | |
| "learning_rate": 5.528052805280528e-06, | |
| "loss": 0.2639, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 2.706013363028953, | |
| "grad_norm": 0.18490262644707528, | |
| "learning_rate": 5.4867986798679865e-06, | |
| "loss": 0.2718, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 2.708240534521158, | |
| "grad_norm": 0.1954391315622027, | |
| "learning_rate": 5.445544554455446e-06, | |
| "loss": 0.2854, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 2.710467706013363, | |
| "grad_norm": 0.19260250643946217, | |
| "learning_rate": 5.404290429042905e-06, | |
| "loss": 0.2716, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 2.7126948775055677, | |
| "grad_norm": 0.18831692036934417, | |
| "learning_rate": 5.363036303630363e-06, | |
| "loss": 0.2552, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 2.7149220489977726, | |
| "grad_norm": 0.17430274431770415, | |
| "learning_rate": 5.321782178217822e-06, | |
| "loss": 0.2561, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 2.717149220489978, | |
| "grad_norm": 0.1742355682043371, | |
| "learning_rate": 5.280528052805281e-06, | |
| "loss": 0.2587, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 2.7193763919821827, | |
| "grad_norm": 0.17609385974503883, | |
| "learning_rate": 5.2392739273927395e-06, | |
| "loss": 0.2632, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 2.7216035634743876, | |
| "grad_norm": 0.18316692056914868, | |
| "learning_rate": 5.198019801980198e-06, | |
| "loss": 0.2689, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 2.7238307349665924, | |
| "grad_norm": 0.18141791204316718, | |
| "learning_rate": 5.156765676567657e-06, | |
| "loss": 0.2699, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 2.7260579064587973, | |
| "grad_norm": 0.18466460423749684, | |
| "learning_rate": 5.115511551155116e-06, | |
| "loss": 0.271, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 2.728285077951002, | |
| "grad_norm": 0.17871322125662872, | |
| "learning_rate": 5.074257425742575e-06, | |
| "loss": 0.2707, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 2.730512249443207, | |
| "grad_norm": 0.18054918919202892, | |
| "learning_rate": 5.033003300330033e-06, | |
| "loss": 0.2742, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 2.732739420935412, | |
| "grad_norm": 0.1909635970563273, | |
| "learning_rate": 4.991749174917492e-06, | |
| "loss": 0.2834, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 2.734966592427617, | |
| "grad_norm": 0.1865446441034419, | |
| "learning_rate": 4.950495049504951e-06, | |
| "loss": 0.2543, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 2.737193763919822, | |
| "grad_norm": 0.18186036719557713, | |
| "learning_rate": 4.909240924092409e-06, | |
| "loss": 0.2467, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 2.739420935412027, | |
| "grad_norm": 0.17895945173012268, | |
| "learning_rate": 4.867986798679868e-06, | |
| "loss": 0.258, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 2.7416481069042318, | |
| "grad_norm": 0.21399121434691468, | |
| "learning_rate": 4.826732673267327e-06, | |
| "loss": 0.2685, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 2.7438752783964366, | |
| "grad_norm": 0.18181551081292457, | |
| "learning_rate": 4.785478547854786e-06, | |
| "loss": 0.2677, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 2.7461024498886415, | |
| "grad_norm": 0.18119449697633516, | |
| "learning_rate": 4.7442244224422445e-06, | |
| "loss": 0.2631, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 2.7483296213808464, | |
| "grad_norm": 0.18462309715029207, | |
| "learning_rate": 4.702970297029704e-06, | |
| "loss": 0.2687, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 2.750556792873051, | |
| "grad_norm": 0.18693994168103942, | |
| "learning_rate": 4.661716171617162e-06, | |
| "loss": 0.2738, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 2.752783964365256, | |
| "grad_norm": 0.1811090852572252, | |
| "learning_rate": 4.62046204620462e-06, | |
| "loss": 0.266, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 2.755011135857461, | |
| "grad_norm": 0.1863451840223255, | |
| "learning_rate": 4.579207920792079e-06, | |
| "loss": 0.2693, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 2.757238307349666, | |
| "grad_norm": 0.18547488130828133, | |
| "learning_rate": 4.537953795379538e-06, | |
| "loss": 0.286, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 2.7594654788418707, | |
| "grad_norm": 0.20698222310531328, | |
| "learning_rate": 4.496699669966997e-06, | |
| "loss": 0.2702, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 2.7616926503340755, | |
| "grad_norm": 0.1758322265902654, | |
| "learning_rate": 4.455445544554456e-06, | |
| "loss": 0.2541, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 2.7639198218262804, | |
| "grad_norm": 0.1844287145188825, | |
| "learning_rate": 4.414191419141915e-06, | |
| "loss": 0.2718, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 2.7661469933184857, | |
| "grad_norm": 0.18272135591893765, | |
| "learning_rate": 4.372937293729373e-06, | |
| "loss": 0.2703, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 2.7683741648106905, | |
| "grad_norm": 0.19633638949830934, | |
| "learning_rate": 4.331683168316831e-06, | |
| "loss": 0.2827, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 2.7706013363028954, | |
| "grad_norm": 0.1972991253891059, | |
| "learning_rate": 4.29042904290429e-06, | |
| "loss": 0.2833, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 2.7728285077951003, | |
| "grad_norm": 0.1848672068698584, | |
| "learning_rate": 4.2491749174917495e-06, | |
| "loss": 0.2611, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 2.775055679287305, | |
| "grad_norm": 0.17608402133914777, | |
| "learning_rate": 4.207920792079209e-06, | |
| "loss": 0.2516, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 2.77728285077951, | |
| "grad_norm": 0.18690090446720733, | |
| "learning_rate": 4.166666666666667e-06, | |
| "loss": 0.263, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 2.779510022271715, | |
| "grad_norm": 0.1976897326681855, | |
| "learning_rate": 4.125412541254126e-06, | |
| "loss": 0.2741, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 2.7817371937639197, | |
| "grad_norm": 0.18113007486279845, | |
| "learning_rate": 4.084158415841584e-06, | |
| "loss": 0.2709, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 2.7839643652561246, | |
| "grad_norm": 0.1738680334739268, | |
| "learning_rate": 4.0429042904290425e-06, | |
| "loss": 0.2609, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 2.78619153674833, | |
| "grad_norm": 0.1895039359894699, | |
| "learning_rate": 4.001650165016502e-06, | |
| "loss": 0.272, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 2.7884187082405347, | |
| "grad_norm": 0.18086962959438846, | |
| "learning_rate": 3.960396039603961e-06, | |
| "loss": 0.2563, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 2.7906458797327396, | |
| "grad_norm": 0.18645414962303633, | |
| "learning_rate": 3.91914191419142e-06, | |
| "loss": 0.2929, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 2.7928730512249444, | |
| "grad_norm": 0.17538676931393385, | |
| "learning_rate": 3.877887788778878e-06, | |
| "loss": 0.261, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 2.7951002227171493, | |
| "grad_norm": 0.17785412889632757, | |
| "learning_rate": 3.836633663366337e-06, | |
| "loss": 0.2689, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 2.797327394209354, | |
| "grad_norm": 0.18324305459455334, | |
| "learning_rate": 3.795379537953796e-06, | |
| "loss": 0.2697, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 2.799554565701559, | |
| "grad_norm": 0.18648774005120053, | |
| "learning_rate": 3.7541254125412545e-06, | |
| "loss": 0.2715, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 2.801781737193764, | |
| "grad_norm": 0.18199394587678758, | |
| "learning_rate": 3.7128712871287128e-06, | |
| "loss": 0.2644, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 2.8040089086859687, | |
| "grad_norm": 0.1778832472052361, | |
| "learning_rate": 3.671617161716172e-06, | |
| "loss": 0.2775, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 2.8062360801781736, | |
| "grad_norm": 0.18580722059990096, | |
| "learning_rate": 3.6303630363036306e-06, | |
| "loss": 0.2862, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 2.8084632516703785, | |
| "grad_norm": 0.18284126010620624, | |
| "learning_rate": 3.589108910891089e-06, | |
| "loss": 0.2777, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 2.8106904231625833, | |
| "grad_norm": 0.17793340543250122, | |
| "learning_rate": 3.547854785478548e-06, | |
| "loss": 0.2695, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 2.812917594654788, | |
| "grad_norm": 0.17026032822717652, | |
| "learning_rate": 3.506600660066007e-06, | |
| "loss": 0.2659, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 2.815144766146993, | |
| "grad_norm": 0.2009194626526499, | |
| "learning_rate": 3.4653465346534657e-06, | |
| "loss": 0.2656, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 2.8173719376391984, | |
| "grad_norm": 0.18505755658284295, | |
| "learning_rate": 3.424092409240924e-06, | |
| "loss": 0.2689, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 2.819599109131403, | |
| "grad_norm": 0.19346965340451247, | |
| "learning_rate": 3.382838283828383e-06, | |
| "loss": 0.2709, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 2.821826280623608, | |
| "grad_norm": 0.18373096351793147, | |
| "learning_rate": 3.3415841584158418e-06, | |
| "loss": 0.2724, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 2.824053452115813, | |
| "grad_norm": 0.17744038770228643, | |
| "learning_rate": 3.3003300330033e-06, | |
| "loss": 0.26, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 2.826280623608018, | |
| "grad_norm": 0.17737412754439033, | |
| "learning_rate": 3.259075907590759e-06, | |
| "loss": 0.2668, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 2.8285077951002227, | |
| "grad_norm": 0.17079177918726388, | |
| "learning_rate": 3.2178217821782182e-06, | |
| "loss": 0.2598, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 2.8307349665924275, | |
| "grad_norm": 0.1797365633284667, | |
| "learning_rate": 3.176567656765677e-06, | |
| "loss": 0.2507, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 2.8329621380846324, | |
| "grad_norm": 0.183335091712768, | |
| "learning_rate": 3.135313531353135e-06, | |
| "loss": 0.2788, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 2.8351893095768377, | |
| "grad_norm": 0.17765873948179847, | |
| "learning_rate": 3.0940594059405943e-06, | |
| "loss": 0.251, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 2.8374164810690425, | |
| "grad_norm": 0.17938524809533818, | |
| "learning_rate": 3.052805280528053e-06, | |
| "loss": 0.2744, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 2.8396436525612474, | |
| "grad_norm": 0.17995840306068717, | |
| "learning_rate": 3.0115511551155116e-06, | |
| "loss": 0.2625, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 2.8418708240534523, | |
| "grad_norm": 0.18844273854538926, | |
| "learning_rate": 2.9702970297029703e-06, | |
| "loss": 0.2814, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 2.844097995545657, | |
| "grad_norm": 0.17983513000984847, | |
| "learning_rate": 2.9290429042904294e-06, | |
| "loss": 0.2647, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 2.846325167037862, | |
| "grad_norm": 0.18854659104915525, | |
| "learning_rate": 2.8877887788778877e-06, | |
| "loss": 0.2786, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 2.848552338530067, | |
| "grad_norm": 0.19009114215108075, | |
| "learning_rate": 2.8465346534653468e-06, | |
| "loss": 0.2833, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 2.8507795100222717, | |
| "grad_norm": 0.17542012697195186, | |
| "learning_rate": 2.8052805280528055e-06, | |
| "loss": 0.27, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 2.8530066815144766, | |
| "grad_norm": 0.1735572771297532, | |
| "learning_rate": 2.764026402640264e-06, | |
| "loss": 0.2591, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 2.8552338530066814, | |
| "grad_norm": 0.17811981123044532, | |
| "learning_rate": 2.722772277227723e-06, | |
| "loss": 0.2686, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 2.8574610244988863, | |
| "grad_norm": 0.1879360008971378, | |
| "learning_rate": 2.6815181518151815e-06, | |
| "loss": 0.2664, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 2.859688195991091, | |
| "grad_norm": 0.17888528677298804, | |
| "learning_rate": 2.6402640264026406e-06, | |
| "loss": 0.2701, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 2.861915367483296, | |
| "grad_norm": 0.17662474501142036, | |
| "learning_rate": 2.599009900990099e-06, | |
| "loss": 0.2826, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 2.864142538975501, | |
| "grad_norm": 0.18906031553549424, | |
| "learning_rate": 2.557755775577558e-06, | |
| "loss": 0.2866, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 2.866369710467706, | |
| "grad_norm": 0.17354639342193298, | |
| "learning_rate": 2.5165016501650166e-06, | |
| "loss": 0.2502, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 2.868596881959911, | |
| "grad_norm": 0.17707561914042097, | |
| "learning_rate": 2.4752475247524753e-06, | |
| "loss": 0.2705, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 2.870824053452116, | |
| "grad_norm": 0.17357571373149694, | |
| "learning_rate": 2.433993399339934e-06, | |
| "loss": 0.2598, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 2.8730512249443207, | |
| "grad_norm": 0.18095560062308227, | |
| "learning_rate": 2.392739273927393e-06, | |
| "loss": 0.2635, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 2.8752783964365256, | |
| "grad_norm": 0.17612400842571985, | |
| "learning_rate": 2.351485148514852e-06, | |
| "loss": 0.2554, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 2.8775055679287305, | |
| "grad_norm": 0.18632630258694613, | |
| "learning_rate": 2.31023102310231e-06, | |
| "loss": 0.2754, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 2.8797327394209353, | |
| "grad_norm": 0.176004846745384, | |
| "learning_rate": 2.268976897689769e-06, | |
| "loss": 0.2661, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 2.88195991091314, | |
| "grad_norm": 0.1844978464314028, | |
| "learning_rate": 2.227722772277228e-06, | |
| "loss": 0.2703, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 2.884187082405345, | |
| "grad_norm": 0.17388026417343408, | |
| "learning_rate": 2.1864686468646865e-06, | |
| "loss": 0.2594, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 2.8864142538975504, | |
| "grad_norm": 0.1746902632671786, | |
| "learning_rate": 2.145214521452145e-06, | |
| "loss": 0.2604, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 2.888641425389755, | |
| "grad_norm": 0.17388558045342958, | |
| "learning_rate": 2.1039603960396043e-06, | |
| "loss": 0.2735, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 2.89086859688196, | |
| "grad_norm": 0.1805564659412802, | |
| "learning_rate": 2.062706270627063e-06, | |
| "loss": 0.2795, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 2.893095768374165, | |
| "grad_norm": 0.18299640212213222, | |
| "learning_rate": 2.0214521452145212e-06, | |
| "loss": 0.2788, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 2.89532293986637, | |
| "grad_norm": 0.1726098063623092, | |
| "learning_rate": 1.9801980198019803e-06, | |
| "loss": 0.2695, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.8975501113585747, | |
| "grad_norm": 0.7589122167800628, | |
| "learning_rate": 1.938943894389439e-06, | |
| "loss": 0.2756, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 2.8997772828507795, | |
| "grad_norm": 0.17268798276447622, | |
| "learning_rate": 1.897689768976898e-06, | |
| "loss": 0.2676, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 2.9020044543429844, | |
| "grad_norm": 0.1761435400733331, | |
| "learning_rate": 1.8564356435643564e-06, | |
| "loss": 0.2631, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 2.9042316258351892, | |
| "grad_norm": 0.19236534492394342, | |
| "learning_rate": 1.8151815181518153e-06, | |
| "loss": 0.2894, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 2.906458797327394, | |
| "grad_norm": 0.1837113832497075, | |
| "learning_rate": 1.773927392739274e-06, | |
| "loss": 0.2577, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 2.908685968819599, | |
| "grad_norm": 0.18799332234673125, | |
| "learning_rate": 1.7326732673267329e-06, | |
| "loss": 0.2895, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 2.910913140311804, | |
| "grad_norm": 0.16575965823267147, | |
| "learning_rate": 1.6914191419141915e-06, | |
| "loss": 0.2586, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 2.9131403118040087, | |
| "grad_norm": 0.1821331264805557, | |
| "learning_rate": 1.65016501650165e-06, | |
| "loss": 0.2901, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 2.9153674832962135, | |
| "grad_norm": 0.20278623871064896, | |
| "learning_rate": 1.6089108910891091e-06, | |
| "loss": 0.2547, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 2.917594654788419, | |
| "grad_norm": 0.18693643097264878, | |
| "learning_rate": 1.5676567656765676e-06, | |
| "loss": 0.2665, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 2.9198218262806237, | |
| "grad_norm": 0.18251206885926352, | |
| "learning_rate": 1.5264026402640265e-06, | |
| "loss": 0.305, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 2.9220489977728286, | |
| "grad_norm": 0.1795333615670338, | |
| "learning_rate": 1.4851485148514852e-06, | |
| "loss": 0.2746, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 2.9242761692650334, | |
| "grad_norm": 0.17714431772046693, | |
| "learning_rate": 1.4438943894389438e-06, | |
| "loss": 0.2627, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 2.9265033407572383, | |
| "grad_norm": 0.17660747696135468, | |
| "learning_rate": 1.4026402640264027e-06, | |
| "loss": 0.272, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 2.928730512249443, | |
| "grad_norm": 0.18249194400005841, | |
| "learning_rate": 1.3613861386138614e-06, | |
| "loss": 0.2667, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 2.930957683741648, | |
| "grad_norm": 0.18093238717836005, | |
| "learning_rate": 1.3201320132013203e-06, | |
| "loss": 0.2756, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 2.933184855233853, | |
| "grad_norm": 0.1857417372113312, | |
| "learning_rate": 1.278877887788779e-06, | |
| "loss": 0.2765, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 2.935412026726058, | |
| "grad_norm": 0.18733495569539454, | |
| "learning_rate": 1.2376237623762377e-06, | |
| "loss": 0.2798, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 2.937639198218263, | |
| "grad_norm": 0.17193783973837945, | |
| "learning_rate": 1.1963696369636966e-06, | |
| "loss": 0.2521, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 2.939866369710468, | |
| "grad_norm": 0.16811094774035254, | |
| "learning_rate": 1.155115511551155e-06, | |
| "loss": 0.2579, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.9420935412026727, | |
| "grad_norm": 0.1793421904458536, | |
| "learning_rate": 1.113861386138614e-06, | |
| "loss": 0.2698, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 2.9443207126948776, | |
| "grad_norm": 0.17505515975567199, | |
| "learning_rate": 1.0726072607260726e-06, | |
| "loss": 0.2685, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 2.9465478841870825, | |
| "grad_norm": 0.18282625596514968, | |
| "learning_rate": 1.0313531353135315e-06, | |
| "loss": 0.2736, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 2.9487750556792873, | |
| "grad_norm": 0.16894345591206814, | |
| "learning_rate": 9.900990099009902e-07, | |
| "loss": 0.2564, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 2.951002227171492, | |
| "grad_norm": 0.17025646518611448, | |
| "learning_rate": 9.48844884488449e-07, | |
| "loss": 0.2599, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 2.953229398663697, | |
| "grad_norm": 0.17513515627006346, | |
| "learning_rate": 9.075907590759076e-07, | |
| "loss": 0.2791, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 2.955456570155902, | |
| "grad_norm": 0.1828597861925402, | |
| "learning_rate": 8.663366336633664e-07, | |
| "loss": 0.2889, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 2.9576837416481068, | |
| "grad_norm": 0.17806935040212402, | |
| "learning_rate": 8.25082508250825e-07, | |
| "loss": 0.2766, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 2.9599109131403116, | |
| "grad_norm": 0.17322189701290536, | |
| "learning_rate": 7.838283828382838e-07, | |
| "loss": 0.2774, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 2.9621380846325165, | |
| "grad_norm": 0.17996043400629883, | |
| "learning_rate": 7.425742574257426e-07, | |
| "loss": 0.2734, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.9643652561247213, | |
| "grad_norm": 0.17947187484562413, | |
| "learning_rate": 7.013201320132014e-07, | |
| "loss": 0.2713, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 2.9665924276169267, | |
| "grad_norm": 0.19967183239652675, | |
| "learning_rate": 6.600660066006602e-07, | |
| "loss": 0.2645, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 2.9688195991091315, | |
| "grad_norm": 0.16870682519840827, | |
| "learning_rate": 6.188118811881188e-07, | |
| "loss": 0.2653, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 2.9710467706013364, | |
| "grad_norm": 0.17344643257978654, | |
| "learning_rate": 5.775577557755775e-07, | |
| "loss": 0.2486, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 2.9732739420935412, | |
| "grad_norm": 0.1818486260078175, | |
| "learning_rate": 5.363036303630363e-07, | |
| "loss": 0.2704, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 2.975501113585746, | |
| "grad_norm": 0.18264295704074213, | |
| "learning_rate": 4.950495049504951e-07, | |
| "loss": 0.2847, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 2.977728285077951, | |
| "grad_norm": 0.18007690267794807, | |
| "learning_rate": 4.537953795379538e-07, | |
| "loss": 0.2623, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 2.979955456570156, | |
| "grad_norm": 0.1732717832641267, | |
| "learning_rate": 4.125412541254125e-07, | |
| "loss": 0.2572, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 2.9821826280623607, | |
| "grad_norm": 0.16832712239665867, | |
| "learning_rate": 3.712871287128713e-07, | |
| "loss": 0.2553, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 2.984409799554566, | |
| "grad_norm": 0.1944252415604603, | |
| "learning_rate": 3.300330033003301e-07, | |
| "loss": 0.2639, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.986636971046771, | |
| "grad_norm": 0.20639972423223196, | |
| "learning_rate": 2.8877887788778876e-07, | |
| "loss": 0.2806, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 2.9888641425389757, | |
| "grad_norm": 0.1910915850331487, | |
| "learning_rate": 2.4752475247524754e-07, | |
| "loss": 0.2728, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 2.9910913140311806, | |
| "grad_norm": 0.1756097477275674, | |
| "learning_rate": 2.0627062706270625e-07, | |
| "loss": 0.272, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 2.9933184855233854, | |
| "grad_norm": 0.17215083407744072, | |
| "learning_rate": 1.6501650165016504e-07, | |
| "loss": 0.2705, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 2.9955456570155903, | |
| "grad_norm": 0.18331819283347497, | |
| "learning_rate": 1.2376237623762377e-07, | |
| "loss": 0.2926, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 2.997772828507795, | |
| "grad_norm": 0.17390758148350594, | |
| "learning_rate": 8.250825082508252e-08, | |
| "loss": 0.2698, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.17310367320777945, | |
| "learning_rate": 4.125412541254126e-08, | |
| "loss": 0.2685, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1347, | |
| "total_flos": 1.4980518380283363e+19, | |
| "train_loss": 0.4525371772342907, | |
| "train_runtime": 41466.0073, | |
| "train_samples_per_second": 0.52, | |
| "train_steps_per_second": 0.032 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1347, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.4980518380283363e+19, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
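
The record above is the complete `trainer_state.json` that the Hugging Face `Trainer` writes next to its checkpoints: per-step entries under `log_history`, a final aggregate entry (`train_loss`, `train_runtime`, and throughput figures), and the run configuration. As a quick way to consume it, here is a minimal Python sketch that parses the state, recomputes the mean logged loss, and cross-checks the reported throughput. The file path is an assumption; the keys used are the ones visible in the record above.

```python
import json

# Load a Hugging Face Trainer state file. The path is an assumption;
# the Trainer normally writes this as trainer_state.json in its output dir.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live under "log_history"; the final entry holds the
# aggregate stats (train_loss, train_runtime, ...) instead of a "loss" key.
step_logs = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

# With logging_steps = 1 every optimizer step is logged, so the plain mean
# of the per-step losses should land close to the reported train_loss.
mean_loss = sum(e["loss"] for e in step_logs) / len(step_logs)
print(f"logged steps:        {len(step_logs)}")
print(f"mean step loss:      {mean_loss:.4f}")
print(f"reported train_loss: {summary['train_loss']:.4f}")

# Cross-check throughput: global_step / train_runtime should reproduce
# the reported train_steps_per_second (1347 / 41466.0073 ~= 0.032).
derived_sps = state["global_step"] / summary["train_runtime"]
print(f"derived steps/sec:   {derived_sps:.3f}")
print(f"reported steps/sec:  {summary['train_steps_per_second']}")
```

Note that `train_loss` (0.4525) sits well above the final per-step losses (~0.27) because it averages over the whole run, including the high-loss warm-up steps at the start of epoch 0; the sketch's recomputed mean should show the same effect.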