Lines matching refs:LR — per-epoch training loss and learning rate:

312       "Epoch [0] Loss: 1.6627886372227823, LR 0.01\n",
313 "Epoch [1] Loss: 1.210370733382854, LR 0.01\n",
314 "Epoch [2] Loss: 0.9692377131035987, LR 0.01\n",
315 "Epoch [3] Loss: 0.7976046623067653, LR 0.01\n",
316 "Epoch [4] Loss: 0.5714595343476983, LR 0.01\n",
317 "Epoch [5] Loss: 0.4458411196444897, LR 0.01\n",
318 "Epoch [6] Loss: 0.36039798817736035, LR 0.01\n",
319 "Epoch [7] Loss: 0.32665719377233626, LR 0.01\n",
320 "Epoch [8] Loss: 0.262064205702915, LR 0.01\n",
321 "Epoch [9] Loss: 0.22285924059279422, LR 0.0075\n",
322 "Epoch [10] Loss: 0.19018426854559717, LR 0.0075\n",
323 "Epoch [11] Loss: 0.1718730723604243, LR 0.0075\n",
324 "Epoch [12] Loss: 0.15736752171670237, LR 0.0075\n",
325 "Epoch [13] Loss: 0.14579375246737866, LR 0.0075\n",
326 "Epoch [14] Loss: 0.13546599733068587, LR 0.0075\n",
327 "Epoch [15] Loss: 0.12490207590955368, LR 0.0075\n",
328 "Epoch [16] Loss: 0.11803316300915133, LR 0.0075\n",
329 "Epoch [17] Loss: 0.10653189395336395, LR 0.0075\n",
330 "Epoch [18] Loss: 0.10514750379197141, LR 0.0075\n",
331 "Epoch [19] Loss: 0.09590611559279422, LR 0.005625\n",
332 "Epoch [20] Loss: 0.08146028108494256, LR 0.005625\n",
333 "Epoch [21] Loss: 0.07707348782965477, LR 0.005625\n",
334 "Epoch [22] Loss: 0.07206193436967566, LR 0.005625\n",
335 "Epoch [23] Loss: 0.07001185417175293, LR 0.005625\n",
336 "Epoch [24] Loss: 0.06797058351578252, LR 0.005625\n",
337 "Epoch [25] Loss: 0.0649358110224947, LR 0.005625\n",
338 "Epoch [26] Loss: 0.06219124286732775, LR 0.005625\n",
339 "Epoch [27] Loss: 0.06075144828634059, LR 0.005625\n",
340 "Epoch [28] Loss: 0.05711334495134251, LR 0.005625\n",
341 "Epoch [29] Loss: 0.054747099572039666, LR 0.00421875\n",
342 "Epoch [30] Loss: 0.0441775271233092, LR 0.00421875\n",
343 "Epoch [31] Loss: 0.041551097910454936, LR 0.00421875\n",
344 "Epoch [32] Loss: 0.04095017269093503, LR 0.00421875\n",
345 "Epoch [33] Loss: 0.04045371045457556, LR 0.00421875\n",
346 "Epoch [34] Loss: 0.038867686657195394, LR 0.00421875\n",
347 "Epoch [35] Loss: 0.038131744303601854, LR 0.00421875\n",
348 "Epoch [36] Loss: 0.039834817250569664, LR 0.00421875\n",
349 "Epoch [37] Loss: 0.03669035941996473, LR 0.00421875\n",
350 "Epoch [38] Loss: 0.03373505967728635, LR 0.00421875\n",
351 "Epoch [39] Loss: 0.03164981273894615, LR 0.0031640625\n",
352 "Epoch [40] Loss: 0.025532766055035336, LR 0.0031640625\n",
353 "Epoch [41] Loss: 0.022659448867148543, LR 0.0031640625\n",
354 "Epoch [42] Loss: 0.02307056112492338, LR 0.0031640625\n",
355 "Epoch [43] Loss: 0.02236944056571798, LR 0.0031640625\n",
356 "Epoch [44] Loss: 0.022204211963120328, LR 0.0031640625\n",
357 "Epoch [45] Loss: 0.02262336903430046, LR 0.0031640625\n",
358 "Epoch [46] Loss: 0.02253308448385685, LR 0.0031640625\n",
359 "Epoch [47] Loss: 0.025286573044797207, LR 0.0031640625\n",
360 "Epoch [48] Loss: 0.02439300988310127, LR 0.0031640625\n",
361 "Epoch [49] Loss: 0.017976388018181983, LR 0.002373046875\n",
362 "Epoch [50] Loss: 0.014343131095805067, LR 0.002373046875\n",
363 "Epoch [51] Loss: 0.013039355582379281, LR 0.002373046875\n",
364 "Epoch [52] Loss: 0.011884741885687715, LR 0.002373046875\n",
365 "Epoch [53] Loss: 0.011438189668858305, LR 0.002373046875\n",
366 "Epoch [54] Loss: 0.011447292693117832, LR 0.002373046875\n",
367 "Epoch [55] Loss: 0.014212571560068334, LR 0.002373046875\n",
368 "Epoch [56] Loss: 0.019900493724371797, LR 0.002373046875\n",
369 "Epoch [57] Loss: 0.02102568301748722, LR 0.002373046875\n",
370 "Epoch [58] Loss: 0.01346214400961044, LR 0.002373046875\n",
371 "Epoch [59] Loss: 0.010107964911359422, LR 0.0017797851562500002\n",
372 "Epoch [60] Loss: 0.008353193600972494, LR 0.0017797851562500002\n",
373 "Epoch [61] Loss: 0.007678258292218472, LR 0.0017797851562500002\n",
374 "Epoch [62] Loss: 0.007262124660167288, LR 0.0017797851562500002\n",
375 "Epoch [63] Loss: 0.00705223578087827, LR 0.0017797851562500002\n",
376 "Epoch [64] Loss: 0.006788556293774677, LR 0.0017797851562500002\n",
377 "Epoch [65] Loss: 0.006473606571238091, LR 0.0017797851562500002\n",
378 "Epoch [66] Loss: 0.006206096486842378, LR 0.0017797851562500002\n",
379 "Epoch [67] Loss: 0.00584477313021396, LR 0.0017797851562500002\n",
380 "Epoch [68] Loss: 0.005648705267137097, LR 0.0017797851562500002\n",
381 "Epoch [69] Loss: 0.006481769871204458, LR 0.0013348388671875003\n",
382 "Epoch [70] Loss: 0.008430448618341, LR 0.0013348388671875003\n",
383 "Epoch [71] Loss: 0.006877245421105242, LR 0.0013348388671875003\n",
384 "Epoch [72] Loss: 0.005671108281740578, LR 0.0013348388671875003\n",
385 "Epoch [73] Loss: 0.004832422162624116, LR 0.0013348388671875003\n",
386 "Epoch [74] Loss: 0.004441103402604448, LR 0.0013348388671875003\n",
387 "Epoch [75] Loss: 0.004216198591475791, LR 0.0013348388671875003\n",
388 "Epoch [76] Loss: 0.004041922989711967, LR 0.0013348388671875003\n",
389 "Epoch [77] Loss: 0.003937713643337818, LR 0.0013348388671875003\n",
390 "Epoch [78] Loss: 0.010251983049068046, LR 0.0013348388671875003\n",
391 "Epoch [79] Loss: 0.01829354052848004, LR 0.0010011291503906252\n",
392 "Epoch [80] Loss: 0.006723233448561802, LR 0.0010011291503906252\n",
393 "Epoch [81] Loss: 0.004397524798170049, LR 0.0010011291503906252\n",
394 "Epoch [82] Loss: 0.0038475305476087206, LR 0.0010011291503906252\n",
395 "Epoch [83] Loss: 0.003591177945441388, LR 0.0010011291503906252\n",
396 "Epoch [84] Loss: 0.003425112014175743, LR 0.0010011291503906252\n",
397 "Epoch [85] Loss: 0.0032633850549129728, LR 0.0010011291503906252\n",
398 "Epoch [86] Loss: 0.0031762316505959693, LR 0.0010011291503906252\n",
399 "Epoch [87] Loss: 0.0030452777096565734, LR 0.0010011291503906252\n",
400 "Epoch [88] Loss: 0.002950224184220837, LR 0.0010011291503906252\n",
401 "Epoch [89] Loss: 0.002821172171450676, LR 0.0007508468627929689\n",
402 "Epoch [90] Loss: 0.002725780961361337, LR 0.0007508468627929689\n",
403 "Epoch [91] Loss: 0.002660556359493986, LR 0.0007508468627929689\n",
404 "Epoch [92] Loss: 0.0026011724946319414, LR 0.0007508468627929689\n",
405 "Epoch [93] Loss: 0.0025355776256703317, LR 0.0007508468627929689\n",
406 "Epoch [94] Loss: 0.0024825221997626283, LR 0.0007508468627929689\n",
407 "Epoch [95] Loss: 0.0024245587435174497, LR 0.0007508468627929689\n",
408 "Epoch [96] Loss: 0.002365282145879602, LR 0.0007508468627929689\n",
409 "Epoch [97] Loss: 0.0023112583984719946, LR 0.0007508468627929689\n",
410 "Epoch [98] Loss: 0.002257173682780976, LR 0.0007508468627929689\n",
411 "Epoch [99] Loss: 0.002162747085094452, LR 0.0005631351470947267\n"
432 …" print(\"Epoch [{}] Loss: {}, LR {}\".format(e, epoch_loss.asscalar()/(i+1), trainer.learning_…