2023-03-03 08:53:16,888 ----------------------------------------------------------------------------------------------------
2023-03-03 08:53:16,889 Model: "EntityLinker(
  (embeddings): TransformerWordEmbeddings(
    (model): BertModel(
      (embeddings): BertEmbeddings(
        (word_embeddings): Embedding(30523, 768)
        (position_embeddings): Embedding(512, 768)
        (token_type_embeddings): Embedding(2, 768)
        (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
        (dropout): Dropout(p=0.1, inplace=False)
      )
      (encoder): BertEncoder(
        (layer): ModuleList(
          (0-11): 12 x BertLayer(
            (attention): BertAttention(
              (self): BertSelfAttention(
                (query): Linear(in_features=768, out_features=768, bias=True)
                (key): Linear(in_features=768, out_features=768, bias=True)
                (value): Linear(in_features=768, out_features=768, bias=True)
                (dropout): Dropout(p=0.1, inplace=False)
              )
              (output): BertSelfOutput(
                (dense): Linear(in_features=768, out_features=768, bias=True)
                (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
                (dropout): Dropout(p=0.1, inplace=False)
              )
            )
            (intermediate): BertIntermediate(
              (dense): Linear(in_features=768, out_features=3072, bias=True)
              (intermediate_act_fn): GELUActivation()
            )
            (output): BertOutput(
              (dense): Linear(in_features=3072, out_features=768, bias=True)
              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
              (dropout): Dropout(p=0.1, inplace=False)
            )
          )
        )
      )
      (pooler): BertPooler(
        (dense): Linear(in_features=768, out_features=768, bias=True)
        (activation): Tanh()
      )
    )
  )
  (decoder): PrototypicalDecoder(
    (distance): NegativeScaledDotProduct()
  )
  (dropout): Dropout(p=0.0, inplace=False)
  (locked_dropout): LockedDropout(p=0.0)
  (word_dropout): WordDropout(p=0.0)
  (loss_function): CrossEntropyLoss()
  (weights): None
  (weight_tensor) None
)"
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
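For reference, a model with the shape printed above (bert-base-uncased embeddings feeding a PrototypicalDecoder that scores mentions by negative scaled dot product against one prototype per entity) can be assembled in Flair roughly as follows. This is a sketch, not the script that produced this log: the ZELDA loader, the "nel" label type, and the exact PrototypicalDecoder/EntityLinker keyword arguments are assumptions from memory and may differ across Flair versions.

    # Sketch (not the original script): assembling a model matching the printout above.
    # Import paths and keyword arguments follow Flair ~0.12 and may differ by version.
    from flair.datasets import ZELDA            # assumption: ZELDA loader in flair.datasets
    from flair.embeddings import TransformerWordEmbeddings
    from flair.models import EntityLinker
    from flair.nn import PrototypicalDecoder    # assumption: exported from flair.nn

    corpus = ZELDA()                            # 2025740 train + 225082 dev + 70408 test per the log
    label_dict = corpus.make_label_dictionary(label_type="nel")

    embeddings = TransformerWordEmbeddings(
        "bert-base-uncased",                    # the 12-layer, 768-dim encoder printed above
        fine_tune=True,
    )

    decoder = PrototypicalDecoder(
        num_prototypes=len(label_dict),         # one prototype per candidate entity
        embeddings_size=embeddings.embedding_length,
        distance_function="dot_product",        # assumption: the option printed as NegativeScaledDotProduct()
    )

    model = EntityLinker(
        embeddings=embeddings,
        label_dictionary=label_dict,
        label_type="nel",
        decoder=decoder,                        # passed through to the DefaultClassifier base
    )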
2023-03-03 08:53:16,890 Corpus: "Corpus: 2025740 train + 225082 dev + 70408 test sentences"
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
2023-03-03 08:53:16,890 Parameters:
2023-03-03 08:53:16,890 - learning_rate: "0.000500,0.000005"
2023-03-03 08:53:16,890 - mini_batch_size: "64"
2023-03-03 08:53:16,890 - patience: "3"
2023-03-03 08:53:16,890 - anneal_factor: "0.5"
2023-03-03 08:53:16,890 - max_epochs: "50"
2023-03-03 08:53:16,890 - shuffle: "True"
2023-03-03 08:53:16,890 - train_with_dev: "True"
2023-03-03 08:53:16,890 - batch_growth_annealing: "False"
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
2023-03-03 08:53:16,890 Model training base path: "resources/taggers/zelda_infinite_prototypes/link+context-bert-base-uncased-lr-5e-06x100-mbz-64-min-4-candidates-False-chunk-16"
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
2023-03-03 08:53:16,890 Device: cuda:2
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
2023-03-03 08:53:16,890 Embeddings storage mode: none
2023-03-03 08:53:16,890 ----------------------------------------------------------------------------------------------------
2023-03-03 09:25:28,164 epoch 1 - iter 3517/35170 - loss 7.33968989 - time (sec): 1931.27 - samples/sec: 157.30 - lr: 0.000500,0.000005
2023-03-03 09:55:46,979 epoch 1 - iter 7034/35170 - loss 7.27447108 - time (sec): 3750.09 - samples/sec: 149.22 - lr: 0.000500,0.000005
2023-03-03 10:25:46,503 epoch 1 - iter 10551/35170 - loss 7.28195612 - time (sec): 5549.61 - samples/sec: 146.23 - lr: 0.000500,0.000005
2023-03-03 10:55:32,139 epoch 1 - iter 14068/35170 - loss 7.18805255 - time (sec): 7335.25 - samples/sec: 146.01 - lr: 0.000500,0.000005
2023-03-03 11:25:10,208 epoch 1 - iter 17585/35170 - loss 7.12015571 - time (sec): 9113.32 - samples/sec: 145.94 - lr: 0.000500,0.000005
2023-03-03 11:55:05,665 epoch 1 - iter 21102/35170 - loss 6.99208438 - time (sec): 10908.77 - samples/sec: 147.26 - lr: 0.000500,0.000005
2023-03-03 12:21:15,212 epoch 1 - iter 24619/35170 - loss 6.93270972 - time (sec): 12478.32 - samples/sec: 150.85 - lr: 0.000500,0.000005
2023-03-03 12:45:39,061 epoch 1 - iter 28136/35170 - loss 6.82552170 - time (sec): 13942.17 - samples/sec: 150.55 - lr: 0.000500,0.000005
2023-03-03 13:13:54,131 epoch 1 - iter 31653/35170 - loss 6.69019944 - time (sec): 15637.24 - samples/sec: 150.26 - lr: 0.000500,0.000005
2023-03-03 13:43:35,017 epoch 1 - iter 35170/35170 - loss 6.51012213 - time (sec): 17418.13 - samples/sec: 149.90 - lr: 0.000500,0.000005
2023-03-03 13:43:35,018 saving model of current epoch
2023-03-03 13:43:39,163 ----------------------------------------------------------------------------------------------------
2023-03-03 13:43:39,163 EPOCH 1 done: loss 6.5101 - lr 0.000500,0.000005
2023-03-03 13:45:07,567 Evaluating as a multi-label problem: False
2023-03-03 13:45:07,916 TEST : loss 6.730576992034912 - f1-score (micro avg) 0.2512
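The parameter block above maps onto Flair's ModelTrainer.train() call; a sketch follows, assuming the classic (pre-0.13) trainer API. The two comma-separated learning rates in the log indicate two optimizer parameter groups (decoder at 5e-4, transformer at 5e-6, matching the lr-5e-06x100 tag in the base path); the plain train() signature below only expresses the first, and the second would need a custom optimizer set up outside this call.

    # Sketch of the training call behind the "Parameters:" block above
    # (assumed Flair 0.12-style API; model and corpus as in the earlier sketch).
    from flair.trainers import ModelTrainer

    trainer = ModelTrainer(model, corpus)
    trainer.train(
        "resources/taggers/zelda_infinite_prototypes/"
        "link+context-bert-base-uncased-lr-5e-06x100-mbz-64-min-4-candidates-False-chunk-16",
        learning_rate=5e-4,          # first parameter group; annealed by 0.5 after patience=3 bad epochs
        mini_batch_size=64,
        patience=3,
        anneal_factor=0.5,
        max_epochs=50,
        shuffle=True,
        train_with_dev=True,         # dev folded into training, hence per-epoch TEST evaluation
        monitor_test=True,           # assumption: produces the per-epoch "TEST :" lines
        save_model_each_k_epochs=1,  # assumption: matches "saving model of current epoch"
        embeddings_storage_mode="none",
    )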
2023-03-03 13:45:18,480 ----------------------------------------------------------------------------------------------------
2023-03-03 14:15:06,443 epoch 2 - iter 3517/35170 - loss 5.17924784 - time (sec): 1787.96 - samples/sec: 144.99 - lr: 0.000500,0.000005
2023-03-03 14:45:03,392 epoch 2 - iter 7034/35170 - loss 5.09280705 - time (sec): 3584.91 - samples/sec: 144.63 - lr: 0.000500,0.000005
2023-03-03 15:14:57,888 epoch 2 - iter 10551/35170 - loss 5.01616657 - time (sec): 5379.41 - samples/sec: 144.76 - lr: 0.000500,0.000005
2023-03-03 15:44:46,553 epoch 2 - iter 14068/35170 - loss 4.94429516 - time (sec): 7168.07 - samples/sec: 144.83 - lr: 0.000500,0.000005
2023-03-03 16:14:53,138 epoch 2 - iter 17585/35170 - loss 4.87229895 - time (sec): 8974.66 - samples/sec: 144.80 - lr: 0.000500,0.000005
2023-03-03 16:44:54,653 epoch 2 - iter 21102/35170 - loss 4.80559632 - time (sec): 10776.17 - samples/sec: 144.81 - lr: 0.000500,0.000005
2023-03-03 17:14:50,403 epoch 2 - iter 24619/35170 - loss 4.74001642 - time (sec): 12571.92 - samples/sec: 144.90 - lr: 0.000500,0.000005
2023-03-03 17:42:05,732 epoch 2 - iter 28136/35170 - loss 4.67919135 - time (sec): 14207.25 - samples/sec: 146.45 - lr: 0.000500,0.000005
2023-03-03 18:10:41,106 epoch 2 - iter 31653/35170 - loss 4.61713166 - time (sec): 15922.63 - samples/sec: 147.27 - lr: 0.000500,0.000005
2023-03-03 18:40:31,534 epoch 2 - iter 35170/35170 - loss 4.55950522 - time (sec): 17713.05 - samples/sec: 147.19 - lr: 0.000500,0.000005
2023-03-03 18:40:31,535 saving model of current epoch
2023-03-03 18:40:36,182 ----------------------------------------------------------------------------------------------------
2023-03-03 18:40:36,183 EPOCH 2 done: loss 4.5595 - lr 0.000500,0.000005
2023-03-03 18:42:04,857 Evaluating as a multi-label problem: False
2023-03-03 18:42:05,238 TEST : loss 4.818673133850098 - f1-score (micro avg) 0.3798
2023-03-03 18:42:17,986 ----------------------------------------------------------------------------------------------------
2023-03-03 19:12:05,791 epoch 3 - iter 3517/35170 - loss 3.76039353 - time (sec): 1787.80 - samples/sec: 146.41 - lr: 0.000500,0.000005
2023-03-03 19:42:03,849 epoch 3 - iter 7034/35170 - loss 3.73331830 - time (sec): 3585.86 - samples/sec: 146.06 - lr: 0.000500,0.000005
2023-03-03 20:11:57,750 epoch 3 - iter 10551/35170 - loss 3.71193516 - time (sec): 5379.76 - samples/sec: 145.68 - lr: 0.000500,0.000005
2023-03-03 20:41:49,904 epoch 3 - iter 14068/35170 - loss 3.68052827 - time (sec): 7171.92 - samples/sec: 145.53 - lr: 0.000500,0.000005
2023-03-03 21:11:39,742 epoch 3 - iter 17585/35170 - loss 3.65385938 - time (sec): 8961.76 - samples/sec: 145.51 - lr: 0.000500,0.000005
2023-03-03 21:41:38,125 epoch 3 - iter 21102/35170 - loss 3.62732454 - time (sec): 10760.14 - samples/sec: 145.34 - lr: 0.000500,0.000005
2023-03-03 22:11:26,825 epoch 3 - iter 24619/35170 - loss 3.59814665 - time (sec): 12548.84 - samples/sec: 145.40 - lr: 0.000500,0.000005
2023-03-03 22:41:17,144 epoch 3 - iter 28136/35170 - loss 3.56910504 - time (sec): 14339.16 - samples/sec: 145.43 - lr: 0.000500,0.000005
2023-03-03 23:10:47,001 epoch 3 - iter 31653/35170 - loss 3.54065189 - time (sec): 16109.01 - samples/sec: 145.62 - lr: 0.000500,0.000005
2023-03-03 23:39:36,899 epoch 3 - iter 35170/35170 - loss 3.51214018 - time (sec): 17838.91 - samples/sec: 146.15 - lr: 0.000500,0.000005
2023-03-03 23:39:36,900 saving model of current epoch
2023-03-03 23:39:41,465 ----------------------------------------------------------------------------------------------------
2023-03-03 23:39:41,465 EPOCH 3 done: loss 3.5121 - lr 0.000500,0.000005
2023-03-03 23:41:09,340 Evaluating as a multi-label problem: False
2023-03-03 23:41:09,729 TEST : loss 3.867715835571289 - f1-score (micro avg) 0.4573
2023-03-03 23:41:22,261 ----------------------------------------------------------------------------------------------------
2023-03-04 00:10:13,789 epoch 4 - iter 3517/35170 - loss 3.02249138 - time (sec): 1731.53 - samples/sec: 150.55 - lr: 0.000500,0.000005
2023-03-04 00:39:16,588 epoch 4 - iter 7034/35170 - loss 3.00474228 - time (sec): 3474.33 - samples/sec: 150.84 - lr: 0.000500,0.000005
2023-03-04 01:09:04,105 epoch 4 - iter 10551/35170 - loss 2.98767697 - time (sec): 5261.84 - samples/sec: 148.92 - lr: 0.000500,0.000005
2023-03-04 01:38:52,787 epoch 4 - iter 14068/35170 - loss 2.97329390 - time (sec): 7050.53 - samples/sec: 148.15 - lr: 0.000500,0.000005
2023-03-04 02:08:43,378 epoch 4 - iter 17585/35170 - loss 2.95597357 - time (sec): 8841.12 - samples/sec: 147.59 - lr: 0.000500,0.000005
2023-03-04 02:38:43,804 epoch 4 - iter 21102/35170 - loss 2.94411799 - time (sec): 10641.54 - samples/sec: 147.26 - lr: 0.000500,0.000005
2023-03-04 03:08:31,717 epoch 4 - iter 24619/35170 - loss 2.92758539 - time (sec): 12429.46 - samples/sec: 147.14 - lr: 0.000500,0.000005
2023-03-04 03:38:11,132 epoch 4 - iter 28136/35170 - loss 2.91472628 - time (sec): 14208.87 - samples/sec: 146.96 - lr: 0.000500,0.000005
2023-03-04 04:08:02,545 epoch 4 - iter 31653/35170 - loss 2.89972589 - time (sec): 16000.28 - samples/sec: 146.68 - lr: 0.000500,0.000005
2023-03-04 04:37:53,952 epoch 4 - iter 35170/35170 - loss 2.88553185 - time (sec): 17791.69 - samples/sec: 146.54 - lr: 0.000500,0.000005
2023-03-04 04:37:53,953 saving model of current epoch
2023-03-04 04:37:58,527 ----------------------------------------------------------------------------------------------------
2023-03-04 04:37:58,528 EPOCH 4 done: loss 2.8855 - lr 0.000500,0.000005
2023-03-04 04:39:27,163 Evaluating as a multi-label problem: False
2023-03-04 04:39:27,570 TEST : loss 3.2030837535858154 - f1-score (micro avg) 0.5484
2023-03-04 04:39:40,336 ----------------------------------------------------------------------------------------------------
2023-03-04 05:09:38,638 epoch 5 - iter 3517/35170 - loss 2.53857645 - time (sec): 1798.30 - samples/sec: 145.02 - lr: 0.000500,0.000005
2023-03-04 05:39:32,721 epoch 5 - iter 7034/35170 - loss 2.53113350 - time (sec): 3592.39 - samples/sec: 144.99 - lr: 0.000500,0.000005
2023-03-04 06:09:16,295 epoch 5 - iter 10551/35170 - loss 2.52315452 - time (sec): 5375.96 - samples/sec: 145.28 - lr: 0.000500,0.000005
2023-03-04 06:39:01,967 epoch 5 - iter 14068/35170 - loss 2.51457284 - time (sec): 7161.63 - samples/sec: 145.38 - lr: 0.000500,0.000005
2023-03-04 07:08:47,685 epoch 5 - iter 17585/35170 - loss 2.50862844 - time (sec): 8947.35 - samples/sec: 145.41 - lr: 0.000500,0.000005
2023-03-04 07:37:12,739 epoch 5 - iter 21102/35170 - loss 2.50472581 - time (sec): 10652.40 - samples/sec: 146.57 - lr: 0.000500,0.000005
2023-03-04 08:06:26,978 epoch 5 - iter 24619/35170 - loss 2.49766897 - time (sec): 12406.64 - samples/sec: 146.83 - lr: 0.000500,0.000005
2023-03-04 08:35:18,181 epoch 5 - iter 28136/35170 - loss 2.48937193 - time (sec): 14137.85 - samples/sec: 147.24 - lr: 0.000500,0.000005
2023-03-04 09:05:03,205 epoch 5 - iter 31653/35170 - loss 2.48408266 - time (sec): 15922.87 - samples/sec: 147.21 - lr: 0.000500,0.000005
2023-03-04 09:34:57,137 epoch 5 - iter 35170/35170 - loss 2.47876511 - time (sec): 17716.80 - samples/sec: 147.16 - lr: 0.000500,0.000005
2023-03-04 09:34:57,138 saving model of current epoch
2023-03-04 09:35:01,748 ----------------------------------------------------------------------------------------------------
2023-03-04 09:35:01,749 EPOCH 5 done: loss 2.4788 - lr 0.000500,0.000005
2023-03-04 09:36:29,925 Evaluating as a multi-label problem: False
2023-03-04 09:36:30,342 TEST : loss 2.774787187576294 - f1-score (micro avg) 0.6159
2023-03-04 09:36:40,861 ----------------------------------------------------------------------------------------------------
2023-03-04 10:06:33,485 epoch 6 - iter 3517/35170 - loss 2.20099117 - time (sec): 1792.62 - samples/sec: 145.71 - lr: 0.000500,0.000005
2023-03-04 10:36:19,031 epoch 6 - iter 7034/35170 - loss 2.20825464 - time (sec): 3578.17 - samples/sec: 145.93 - lr: 0.000500,0.000005
2023-03-04 11:06:12,136 epoch 6 - iter 10551/35170 - loss 2.21167154 - time (sec): 5371.28 - samples/sec: 145.66 - lr: 0.000500,0.000005
2023-03-04 11:35:53,763 epoch 6 - iter 14068/35170 - loss 2.21221572 - time (sec): 7152.90 - samples/sec: 145.52 - lr: 0.000500,0.000005
2023-03-04 12:05:36,675 epoch 6 - iter 17585/35170 - loss 2.21205677 - time (sec): 8935.81 - samples/sec: 145.61 - lr: 0.000500,0.000005
2023-03-04 12:35:35,080 epoch 6 - iter 21102/35170 - loss 2.20914029 - time (sec): 10734.22 - samples/sec: 145.54 - lr: 0.000500,0.000005
2023-03-04 13:05:28,940 epoch 6 - iter 24619/35170 - loss 2.20827187 - time (sec): 12528.08 - samples/sec: 145.66 - lr: 0.000500,0.000005
2023-03-04 13:35:11,580 epoch 6 - iter 28136/35170 - loss 2.20568772 - time (sec): 14310.72 - samples/sec: 145.69 - lr: 0.000500,0.000005
2023-03-04 14:05:04,949 epoch 6 - iter 31653/35170 - loss 2.20542844 - time (sec): 16104.09 - samples/sec: 145.71 - lr: 0.000500,0.000005
2023-03-04 14:34:49,178 epoch 6 - iter 35170/35170 - loss 2.20327336 - time (sec): 17888.32 - samples/sec: 145.75 - lr: 0.000500,0.000005
2023-03-04 14:34:49,179 saving model of current epoch
2023-03-04 14:34:53,566 ----------------------------------------------------------------------------------------------------
2023-03-04 14:34:53,567 EPOCH 6 done: loss 2.2033 - lr 0.000500,0.000005
2023-03-04 14:36:22,091 Evaluating as a multi-label problem: False
2023-03-04 14:36:22,506 TEST : loss 2.5389952659606934 - f1-score (micro avg) 0.6483
2023-03-04 14:36:35,145 ----------------------------------------------------------------------------------------------------
2023-03-04 15:06:24,494 epoch 7 - iter 3517/35170 - loss 1.99132900 - time (sec): 1789.35 - samples/sec: 145.32 - lr: 0.000500,0.000005
2023-03-04 15:36:25,466 epoch 7 - iter 7034/35170 - loss 1.99268880 - time (sec): 3590.32 - samples/sec: 146.01 - lr: 0.000500,0.000005
2023-03-04 16:06:13,827 epoch 7 - iter 10551/35170 - loss 2.00009822 - time (sec): 5378.68 - samples/sec: 145.79 - lr: 0.000500,0.000005
2023-03-04 16:35:51,941 epoch 7 - iter 14068/35170 - loss 1.99946407 - time (sec): 7156.80 - samples/sec: 145.64 - lr: 0.000500,0.000005
2023-03-04 17:05:32,950 epoch 7 - iter 17585/35170 - loss 2.00195555 - time (sec): 8937.81 - samples/sec: 145.67 - lr: 0.000500,0.000005
2023-03-04 17:35:24,092 epoch 7 - iter 21102/35170 - loss 2.00389993 - time (sec): 10728.95 - samples/sec: 145.72 - lr: 0.000500,0.000005
2023-03-04 18:05:13,466 epoch 7 - iter 24619/35170 - loss 2.00701298 - time (sec): 12518.32 - samples/sec: 145.70 - lr: 0.000500,0.000005
2023-03-04 18:34:58,616 epoch 7 - iter 28136/35170 - loss 2.01124533 - time (sec): 14303.47 - samples/sec: 145.72 - lr: 0.000500,0.000005
2023-03-04 19:04:44,598 epoch 7 - iter 31653/35170 - loss 2.01193267 - time (sec): 16089.45 - samples/sec: 145.82 - lr: 0.000500,0.000005
2023-03-04 19:34:33,435 epoch 7 - iter 35170/35170 - loss 2.01301801 - time (sec): 17878.29 - samples/sec: 145.83 - lr: 0.000500,0.000005
2023-03-04 19:34:33,436 saving model of current epoch
2023-03-04 19:34:38,116 ----------------------------------------------------------------------------------------------------
2023-03-04 19:34:38,117 EPOCH 7 done: loss 2.0130 - lr 0.000500,0.000005
2023-03-04 19:36:06,431 Evaluating as a multi-label problem: False
2023-03-04 19:36:06,846 TEST : loss 2.445143938064575 - f1-score (micro avg) 0.6627
2023-03-04 19:36:19,551 ----------------------------------------------------------------------------------------------------
2023-03-04 20:05:59,006 epoch 8 - iter 3517/35170 - loss 1.82460923 - time (sec): 1779.45 - samples/sec: 146.07 - lr: 0.000500,0.000005
2023-03-04 20:35:42,690 epoch 8 - iter 7034/35170 - loss 1.84012624 - time (sec): 3563.14 - samples/sec: 145.87 - lr: 0.000500,0.000005
2023-03-04 21:05:28,196 epoch 8 - iter 10551/35170 - loss 1.84766606 - time (sec): 5348.64 - samples/sec: 146.10 - lr: 0.000500,0.000005
2023-03-04 21:35:20,030 epoch 8 - iter 14068/35170 - loss 1.85624245 - time (sec): 7140.48 - samples/sec: 146.06 - lr: 0.000500,0.000005
2023-03-04 22:04:50,314 epoch 8 - iter 17585/35170 - loss 1.85923098 - time (sec): 8910.76 - samples/sec: 146.40 - lr: 0.000500,0.000005
2023-03-04 22:33:30,588 epoch 8 - iter 21102/35170 - loss 1.86471536 - time (sec): 10631.04 - samples/sec: 147.30 - lr: 0.000500,0.000005
2023-03-04 23:02:11,111 epoch 8 - iter 24619/35170 - loss 1.86909285 - time (sec): 12351.56 - samples/sec: 147.81 - lr: 0.000500,0.000005
2023-03-04 23:30:51,668 epoch 8 - iter 28136/35170 - loss 1.87227382 - time (sec): 14072.12 - samples/sec: 148.15 - lr: 0.000500,0.000005
2023-03-04 23:59:47,366 epoch 8 - iter 31653/35170 - loss 1.87741421 - time (sec): 15807.81 - samples/sec: 148.39 - lr: 0.000500,0.000005
2023-03-05 00:28:39,676 epoch 8 - iter 35170/35170 - loss 1.88029819 - time (sec): 17540.12 - samples/sec: 148.64 - lr: 0.000500,0.000005
2023-03-05 00:28:39,677 saving model of current epoch
2023-03-05 00:28:44,035 ----------------------------------------------------------------------------------------------------
2023-03-05 00:28:44,035 EPOCH 8 done: loss 1.8803 - lr 0.000500,0.000005
2023-03-05 00:30:12,451 Evaluating as a multi-label problem: False
2023-03-05 00:30:12,876 TEST : loss 2.3123185634613037 - f1-score (micro avg) 0.6889
2023-03-05 00:30:25,558 ----------------------------------------------------------------------------------------------------
2023-03-05 00:55:55,051 epoch 9 - iter 3517/35170 - loss 1.72347699 - time (sec): 1529.49 - samples/sec: 170.28 - lr: 0.000500,0.000005
2023-03-05 01:21:23,223 epoch 9 - iter 7034/35170 - loss 1.73050163 - time (sec): 3057.67 - samples/sec: 170.50 - lr: 0.000500,0.000005
2023-03-05 01:50:55,969 epoch 9 - iter 10551/35170 - loss 1.74289307 - time (sec): 4830.41 - samples/sec: 162.02 - lr: 0.000500,0.000005
2023-03-05 02:20:49,163 epoch 9 - iter 14068/35170 - loss 1.74719918 - time (sec): 6623.61 - samples/sec: 157.51 - lr: 0.000500,0.000005
2023-03-05 02:50:33,862 epoch 9 - iter 17585/35170 - loss 1.75510121 - time (sec): 8408.30 - samples/sec: 154.96 - lr: 0.000500,0.000005
2023-03-05 03:20:11,098 epoch 9 - iter 21102/35170 - loss 1.76130930 - time (sec): 10185.54 - samples/sec: 153.54 - lr: 0.000500,0.000005
2023-03-05 03:49:57,887 epoch 9 - iter 24619/35170 - loss 1.76596857 - time (sec): 11972.33 - samples/sec: 152.36 - lr: 0.000500,0.000005
2023-03-05 04:19:49,522 epoch 9 - iter 28136/35170 - loss 1.76997900 - time (sec): 13763.96 - samples/sec: 151.54 - lr: 0.000500,0.000005
2023-03-05 04:49:42,750 epoch 9 - iter 31653/35170 - loss 1.77598012 - time (sec): 15557.19 - samples/sec: 150.86 - lr: 0.000500,0.000005
2023-03-05 05:19:30,242 epoch 9 - iter 35170/35170 - loss 1.78112051 - time (sec): 17344.68 - samples/sec: 150.31 - lr: 0.000500,0.000005
2023-03-05 05:19:30,243 saving model of current epoch
2023-03-05 05:19:34,255 ----------------------------------------------------------------------------------------------------
2023-03-05 05:19:34,255 EPOCH 9 done: loss 1.7811 - lr 0.000500,0.000005
2023-03-05 05:21:02,265 Evaluating as a multi-label problem: False
2023-03-05 05:21:02,693 TEST : loss 2.2485833168029785 - f1-score (micro avg) 0.7018
2023-03-05 05:21:13,216 ----------------------------------------------------------------------------------------------------
2023-03-05 05:50:50,695 epoch 10 - iter 3517/35170 - loss 1.63002813 - time (sec): 1777.48 - samples/sec: 146.19 - lr: 0.000500,0.000005
2023-03-05 06:20:32,087 epoch 10 - iter 7034/35170 - loss 1.64969375 - time (sec): 3558.87 - samples/sec: 146.46 - lr: 0.000500,0.000005
2023-03-05 06:49:59,891 epoch 10 - iter 10551/35170 - loss 1.66190300 - time (sec): 5326.67 - samples/sec: 146.58 - lr: 0.000500,0.000005
2023-03-05 07:19:35,633 epoch 10 - iter 14068/35170 - loss 1.67181842 - time (sec): 7102.42 - samples/sec: 146.68 - lr: 0.000500,0.000005
2023-03-05 07:48:59,030 epoch 10 - iter 17585/35170 - loss 1.67925412 - time (sec): 8865.81 - samples/sec: 146.83 - lr: 0.000500,0.000005
2023-03-05 08:18:28,252 epoch 10 - iter 21102/35170 - loss 1.68413452 - time (sec): 10635.04 - samples/sec: 146.88 - lr: 0.000500,0.000005
2023-03-05 08:48:02,669 epoch 10 - iter 24619/35170 - loss 1.69064609 - time (sec): 12409.45 - samples/sec: 146.82 - lr: 0.000500,0.000005
2023-03-05 09:17:32,024 epoch 10 - iter 28136/35170 - loss 1.69755881 - time (sec): 14178.81 - samples/sec: 147.05 - lr: 0.000500,0.000005
2023-03-05 09:47:11,217 epoch 10 - iter 31653/35170 - loss 1.70287522 - time (sec): 15958.00 - samples/sec: 147.05 - lr: 0.000500,0.000005
2023-03-05 10:16:38,101 epoch 10 - iter 35170/35170 - loss 1.70820406 - time (sec): 17724.88 - samples/sec: 147.09 - lr: 0.000500,0.000005
2023-03-05 10:16:38,102 saving model of current epoch
2023-03-05 10:16:42,534 ----------------------------------------------------------------------------------------------------
2023-03-05 10:16:42,535 EPOCH 10 done: loss 1.7082 - lr 0.000500,0.000005
2023-03-05 10:18:10,995 Evaluating as a multi-label problem: False
2023-03-05 10:18:11,419 TEST : loss 2.1985042095184326 - f1-score (micro avg) 0.7074
2023-03-05 10:18:24,037 ----------------------------------------------------------------------------------------------------
2023-03-05 10:47:47,058 epoch 11 - iter 3517/35170 - loss 1.57280284 - time (sec): 1763.02 - samples/sec: 146.70 - lr: 0.000500,0.000005
2023-03-05 11:17:07,497 epoch 11 - iter 7034/35170 - loss 1.58292872 - time (sec): 3523.46 - samples/sec: 147.64 - lr: 0.000500,0.000005
2023-03-05 11:46:51,173 epoch 11 - iter 10551/35170 - loss 1.59736075 - time (sec): 5307.14 - samples/sec: 146.79 - lr: 0.000500,0.000005
2023-03-05 12:16:15,616 epoch 11 - iter 14068/35170 - loss 1.60536435 - time (sec): 7071.58 - samples/sec: 146.81 - lr: 0.000500,0.000005
2023-03-05 12:45:34,547 epoch 11 - iter 17585/35170 - loss 1.61641307 - time (sec): 8830.51 - samples/sec: 147.09 - lr: 0.000500,0.000005
2023-03-05 13:15:10,776 epoch 11 - iter 21102/35170 - loss 1.62427225 - time (sec): 10606.74 - samples/sec: 147.12 - lr: 0.000500,0.000005
2023-03-05 13:44:51,293 epoch 11 - iter 24619/35170 - loss 1.63028371 - time (sec): 12387.26 - samples/sec: 147.11 - lr: 0.000500,0.000005
2023-03-05 14:14:16,751 epoch 11 - iter 28136/35170 - loss 1.63810533 - time (sec): 14152.71 - samples/sec: 147.19 - lr: 0.000500,0.000005
2023-03-05 14:43:41,074 epoch 11 - iter 31653/35170 - loss 1.64578998 - time (sec): 15917.04 - samples/sec: 147.40 - lr: 0.000500,0.000005
2023-03-05 15:13:12,591 epoch 11 - iter 35170/35170 - loss 1.65233891 - time (sec): 17688.55 - samples/sec: 147.39 - lr: 0.000500,0.000005
2023-03-05 15:13:12,591 saving model of current epoch
2023-03-05 15:13:17,270 ----------------------------------------------------------------------------------------------------
2023-03-05 15:13:17,270 EPOCH 11 done: loss 1.6523 - lr 0.000500,0.000005
2023-03-05 15:14:45,493 Evaluating as a multi-label problem: False
2023-03-05 15:14:45,919 TEST : loss 2.2193498611450195 - f1-score (micro avg) 0.7127
2023-03-05 15:14:58,623 ----------------------------------------------------------------------------------------------------
2023-03-05 15:44:31,026 epoch 12 - iter 3517/35170 - loss 1.51967276 - time (sec): 1772.40 - samples/sec: 146.64 - lr: 0.000500,0.000005
2023-03-05 16:13:57,966 epoch 12 - iter 7034/35170 - loss 1.54185593 - time (sec): 3539.34 - samples/sec: 147.05 - lr: 0.000500,0.000005
2023-03-05 16:43:31,756 epoch 12 - iter 10551/35170 - loss 1.55629349 - time (sec): 5313.13 - samples/sec: 147.15 - lr: 0.000500,0.000005
2023-03-05 17:13:02,233 epoch 12 - iter 14068/35170 - loss 1.56663025 - time (sec): 7083.61 - samples/sec: 147.24 - lr: 0.000500,0.000005
2023-03-05 17:42:33,371 epoch 12 - iter 17585/35170 - loss 1.57454252 - time (sec): 8854.75 - samples/sec: 147.28 - lr: 0.000500,0.000005
2023-03-05 18:12:12,765 epoch 12 - iter 21102/35170 - loss 1.58148599 - time (sec): 10634.14 - samples/sec: 147.09 - lr: 0.000500,0.000005
2023-03-05 18:41:36,365 epoch 12 - iter 24619/35170 - loss 1.58971076 - time (sec): 12397.74 - samples/sec: 147.12 - lr: 0.000500,0.000005
2023-03-05 19:11:18,346 epoch 12 - iter 28136/35170 - loss 1.59576441 - time (sec): 14179.72 - samples/sec: 147.12 - lr: 0.000500,0.000005
2023-03-05 19:40:55,072 epoch 12 - iter 31653/35170 - loss 1.60261893 - time (sec): 15956.45 - samples/sec: 147.05 - lr: 0.000500,0.000005
2023-03-05 20:10:36,978 epoch 12 - iter 35170/35170 - loss 1.61023570 - time (sec): 17738.35 - samples/sec: 146.98 - lr: 0.000500,0.000005
2023-03-05 20:10:36,979 saving model of current epoch
2023-03-05 20:10:41,509 ----------------------------------------------------------------------------------------------------
2023-03-05 20:10:41,510 EPOCH 12 done: loss 1.6102 - lr 0.000500,0.000005
2023-03-05 20:12:10,186 Evaluating as a multi-label problem: False
2023-03-05 20:12:10,617 TEST : loss 2.1637091636657715 - f1-score (micro avg) 0.7224
2023-03-05 20:12:21,228 ----------------------------------------------------------------------------------------------------
2023-03-05 20:41:55,096 epoch 13 - iter 3517/35170 - loss 1.49276830 - time (sec): 1773.87 - samples/sec: 146.85 - lr: 0.000500,0.000005
2023-03-05 21:11:29,491 epoch 13 - iter 7034/35170 - loss 1.50693964 - time (sec): 3548.26 - samples/sec: 146.71 - lr: 0.000500,0.000005
2023-03-05 21:40:52,943 epoch 13 - iter 10551/35170 - loss 1.51826705 - time (sec): 5311.72 - samples/sec: 146.93 - lr: 0.000500,0.000005
2023-03-05 22:10:18,586 epoch 13 - iter 14068/35170 - loss 1.52970231 - time (sec): 7077.36 - samples/sec: 147.27 - lr: 0.000500,0.000005
2023-03-05 22:39:46,018 epoch 13 - iter 17585/35170 - loss 1.54128927 - time (sec): 8844.79 - samples/sec: 147.30 - lr: 0.000500,0.000005
2023-03-05 23:09:10,088 epoch 13 - iter 21102/35170 - loss 1.55057650 - time (sec): 10608.86 - samples/sec: 147.37 - lr: 0.000500,0.000005
2023-03-05 23:38:38,366 epoch 13 - iter 24619/35170 - loss 1.55743398 - time (sec): 12377.14 - samples/sec: 147.36 - lr: 0.000500,0.000005
2023-03-06 00:08:15,044 epoch 13 - iter 28136/35170 - loss 1.56455221 - time (sec): 14153.82 - samples/sec: 147.24 - lr: 0.000500,0.000005
2023-03-06 00:37:46,257 epoch 13 - iter 31653/35170 - loss 1.57245847 - time (sec): 15925.03 - samples/sec: 147.29 - lr: 0.000500,0.000005
2023-03-06 01:07:11,506 epoch 13 - iter 35170/35170 - loss 1.57813337 - time (sec): 17690.28 - samples/sec: 147.38 - lr: 0.000500,0.000005
2023-03-06 01:07:11,507 saving model of current epoch
2023-03-06 01:07:15,931 ----------------------------------------------------------------------------------------------------
2023-03-06 01:07:15,931 EPOCH 13 done: loss 1.5781 - lr 0.000500,0.000005
2023-03-06 01:08:44,776 Evaluating as a multi-label problem: False
2023-03-06 01:08:45,206 TEST : loss 2.136256217956543 - f1-score (micro avg) 0.7311
2023-03-06 01:08:57,919 ----------------------------------------------------------------------------------------------------
2023-03-06 01:38:23,522 epoch 14 - iter 3517/35170 - loss 1.46978751 - time (sec): 1765.60 - samples/sec: 146.93 - lr: 0.000500,0.000005
2023-03-06 02:08:00,247 epoch 14 - iter 7034/35170 - loss 1.48132730 - time (sec): 3542.33 - samples/sec: 146.88 - lr: 0.000500,0.000005
2023-03-06 02:37:28,665 epoch 14 - iter 10551/35170 - loss 1.49266013 - time (sec): 5310.75 - samples/sec: 147.14 - lr: 0.000500,0.000005
2023-03-06 03:06:59,360 epoch 14 - iter 14068/35170 - loss 1.50311884 - time (sec): 7081.44 - samples/sec: 147.11 - lr: 0.000500,0.000005
2023-03-06 03:36:28,202 epoch 14 - iter 17585/35170 - loss 1.50830027 - time (sec): 8850.28 - samples/sec: 147.15 - lr: 0.000500,0.000005
2023-03-06 04:06:06,415 epoch 14 - iter 21102/35170 - loss 1.51897539 - time (sec): 10628.50 - samples/sec: 147.22 - lr: 0.000500,0.000005
2023-03-06 04:35:35,480 epoch 14 - iter 24619/35170 - loss 1.52605459 - time (sec): 12397.56 - samples/sec: 147.17 - lr: 0.000500,0.000005
2023-03-06 05:05:10,230 epoch 14 - iter 28136/35170 - loss 1.53598720 - time (sec): 14172.31 - samples/sec: 147.04 - lr: 0.000500,0.000005
2023-03-06 05:34:55,628 epoch 14 - iter 31653/35170 - loss 1.54423309 - time (sec): 15957.71 - samples/sec: 147.03 - lr: 0.000500,0.000005
2023-03-06 06:04:40,696 epoch 14 - iter 35170/35170 - loss 1.55234255 - time (sec): 17742.78 - samples/sec: 146.94 - lr: 0.000500,0.000005
2023-03-06 06:04:40,697 saving model of current epoch
2023-03-06 06:04:45,726 ----------------------------------------------------------------------------------------------------
2023-03-06 06:04:45,726 EPOCH 14 done: loss 1.5523 - lr 0.000500,0.000005
2023-03-06 06:06:14,312 Evaluating as a multi-label problem: False
2023-03-06 06:06:14,738 TEST : loss 2.156961679458618 - f1-score (micro avg) 0.7281
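A note on the reported metric: since the evaluation is not treated as a multi-label problem ("Evaluating as a multi-label problem: False"), every gold mention receives exactly one predicted entity, so micro-averaged precision, recall, and F1 all coincide with mention-level accuracy. A toy illustration with scikit-learn (hypothetical labels, not data from this run):

    # When each mention gets exactly one prediction, micro-F1 equals accuracy.
    from sklearn.metrics import accuracy_score, f1_score

    gold = ["Q42", "Q1", "Q64", "Q64"]   # hypothetical gold entity IDs
    pred = ["Q42", "Q5", "Q64", "Q64"]   # hypothetical predictions

    assert f1_score(gold, pred, average="micro") == accuracy_score(gold, pred)
    print(f1_score(gold, pred, average="micro"))  # 0.75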
2023-03-06 06:06:27,240 ----------------------------------------------------------------------------------------------------
2023-03-06 06:36:00,841 epoch 15 - iter 3517/35170 - loss 1.44119512 - time (sec): 1773.60 - samples/sec: 147.77 - lr: 0.000500,0.000005
2023-03-06 07:05:41,633 epoch 15 - iter 7034/35170 - loss 1.45607922 - time (sec): 3554.39 - samples/sec: 147.38 - lr: 0.000500,0.000005
2023-03-06 07:35:14,908 epoch 15 - iter 10551/35170 - loss 1.46652747 - time (sec): 5327.67 - samples/sec: 147.31 - lr: 0.000500,0.000005
2023-03-06 08:04:40,202 epoch 15 - iter 14068/35170 - loss 1.47777526 - time (sec): 7092.96 - samples/sec: 147.61 - lr: 0.000500,0.000005
2023-03-06 08:34:08,553 epoch 15 - iter 17585/35170 - loss 1.48829362 - time (sec): 8861.31 - samples/sec: 147.68 - lr: 0.000500,0.000005
2023-03-06 09:03:28,801 epoch 15 - iter 21102/35170 - loss 1.49748734 - time (sec): 10621.56 - samples/sec: 147.51 - lr: 0.000500,0.000005
2023-03-06 09:32:45,898 epoch 15 - iter 24619/35170 - loss 1.50463029 - time (sec): 12378.66 - samples/sec: 147.55 - lr: 0.000500,0.000005
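The log breaks off mid-epoch 15, so the per-epoch summary (training loss, TEST loss, TEST micro-F1) is easiest to recover by parsing the log itself. A minimal sketch; the filename zelda_training.log is hypothetical:

    # Extract per-epoch train loss and TEST results from a Flair log like the one above.
    import re

    epoch_re = re.compile(r"EPOCH (\d+) done: loss ([\d.]+)")
    test_re = re.compile(r"TEST : loss ([\d.]+) - f1-score \(micro avg\) ([\d.]+)")

    epochs = []
    with open("zelda_training.log") as log:
        for line in log:
            if m := epoch_re.search(line):
                epochs.append({"epoch": int(m.group(1)), "train_loss": float(m.group(2))})
            elif (m := test_re.search(line)) and epochs:
                # The TEST line follows its "EPOCH n done" line, so attach it there.
                epochs[-1]["test_loss"] = float(m.group(1))
                epochs[-1]["test_f1"] = float(m.group(2))

    for row in epochs:
        print(row)  # e.g. {'epoch': 1, 'train_loss': 6.5101, 'test_loss': 6.7305..., 'test_f1': 0.2512}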