loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
*****************************************
Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
*****************************************
------------------------ arguments ------------------------
batches_per_epoch ............................... 312
channel_last .................................... True
ddp ............................................. False
exit_num ........................................ 300
fuse_bn_add_relu ................................ True
fuse_bn_relu .................................... True
gpu_stat_file ................................... None
grad_clipping ................................... 0.0
graph ........................................... True
label_smoothing ................................. 0.1
learning_rate ................................... 4.096
legacy_init ..................................... False
load_path ....................................... None
lr_decay_type ................................... cosine
metric_local .................................... True
metric_train_acc ................................ True
momentum ........................................ 0.875
nccl_fusion_max_ops ............................. 24
nccl_fusion_threshold_mb ........................ 16
num_classes ..................................... 1000
num_devices_per_node ............................ 8
num_epochs ...................................... 1
num_nodes ....................................... 1
ofrecord_part_num ............................... 256
ofrecord_path ................................... /dataset/79846248
print_interval .................................. 100
print_timestamp ................................. False
samples_per_epoch ............................... 1281167
save_init ....................................... False
save_path ....................................... None
scale_grad ...................................... True
skip_eval ....................................... True
synthetic_data .................................. False
total_batches ................................... -1
train_batch_size ................................ 512
train_global_batch_size ......................... 4096
use_fp16 ........................................ True
use_gpu_decode .................................. True
val_batch_size .................................. 50
val_batches_per_epoch ........................... 125
val_global_batch_size ........................... 400
val_samples_per_epoch ........................... 50000
warmup_epochs ................................... 5
weight_decay .................................... 3.0517578125e-05
zero_init_residual .............................. True
-------------------- end of arguments ---------------------
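
For reference, a minimal sketch (not part of the original log) of how the batch-size related arguments above fit together, assuming train_batch_size and val_batch_size are per-rank values:

    # Sanity-check the batch-size arithmetic implied by the arguments above.
    # Assumption: train_batch_size / val_batch_size are per-rank (per-GPU) sizes.

    num_nodes = 1
    num_devices_per_node = 8
    world_size = num_nodes * num_devices_per_node           # 8 ranks

    train_batch_size = 512                                   # per rank (assumed)
    train_global_batch_size = train_batch_size * world_size
    assert train_global_batch_size == 4096                   # matches the log

    samples_per_epoch = 1281167                              # ImageNet train set
    batches_per_epoch = samples_per_epoch // train_global_batch_size
    assert batches_per_epoch == 312                          # matches the log

    val_batch_size = 50                                      # per rank (assumed)
    val_global_batch_size = val_batch_size * world_size
    assert val_global_batch_size == 400                      # matches the log

    val_samples_per_epoch = 50000
    val_batches_per_epoch = val_samples_per_epoch // val_global_batch_size
    assert val_batches_per_epoch == 125                      # matches the log
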
***** Model Init *****
***** Model Init Finish, time escapled: 3.01426 s *****
[rank:1] [train], epoch: 0/1, iter: 100/312, loss: 0.86737, top1: 0.00096, throughput: 423.45 | 2022-04-28 10:30:32.353
[rank:6] [train], epoch: 0/1, iter: 100/312, loss: 0.86738, top1: 0.00104, throughput: 423.47 | 2022-04-28 10:30:32.352
[rank:7] [train], epoch: 0/1, iter: 100/312, loss: 0.86712, top1: 0.00111, throughput: 423.45 | 2022-04-28 10:30:32.353
[rank:0] [train], epoch: 0/1, iter: 100/312, loss: 0.86734, top1: 0.00117, throughput: 423.45 | 2022-04-28 10:30:32.353
[rank:2] [train], epoch: 0/1, iter: 100/312, loss: 0.86748, top1: 0.00105, throughput: 423.48 | 2022-04-28 10:30:32.352
[rank:5] [train], epoch: 0/1, iter: 100/312, loss: 0.86731, top1: 0.00104, throughput: 423.44 | 2022-04-28 10:30:32.357
[rank:3] [train], epoch: 0/1, iter: 100/312, loss: 0.86719, top1: 0.00115, throughput: 423.47 | 2022-04-28 10:30:32.352
[rank:4] [train], epoch: 0/1, iter: 100/312, loss: 0.86706, top1: 0.00131, throughput: 423.44 | 2022-04-28 10:30:32.358
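
The per-rank training lines above follow a fixed format; a small, hypothetical parsing helper (not taken from the training script) could extract their fields like this:

    import re

    # Hypothetical helper: parse one "[rank:N] [train]" line from the log above
    # into a dict of its fields. The regex mirrors the observed line format.
    LINE_RE = re.compile(
        r"\[rank:(?P<rank>\d+)\] \[train\], "
        r"epoch: (?P<epoch>\d+)/(?P<num_epochs>\d+), "
        r"iter: (?P<iter>\d+)/(?P<iters>\d+), "
        r"loss: (?P<loss>[\d.]+), "
        r"top1: (?P<top1>[\d.]+), "
        r"throughput: (?P<throughput>[\d.]+) \| (?P<timestamp>.+)"
    )

    def parse_train_line(line: str) -> dict:
        m = LINE_RE.search(line)
        if m is None:
            raise ValueError(f"unrecognized line: {line!r}")
        d = m.groupdict()
        for key in ("loss", "top1", "throughput"):
            d[key] = float(d[key])
        return d

    example = ("[rank:0] [train], epoch: 0/1, iter: 100/312, loss: 0.86734, "
               "top1: 0.00117, throughput: 423.45 | 2022-04-28 10:30:32.353")
    print(parse_train_line(example)["throughput"])   # 423.45
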
timestamp, name, driver_version, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]
2022/04/28 10:30:32.578, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 75 %, 32510 MiB, 7789 MiB, 24721 MiB
2022/04/28 10:30:32.583, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 68 %, 32510 MiB, 7806 MiB, 24704 MiB
2022/04/28 10:30:32.591, Tesla V100-SXM2-32GB, 470.57.02, 92 %, 82 %, 32510 MiB, 7958 MiB, 24552 MiB
2022/04/28 10:30:32.601, Tesla V100-SXM2-32GB, 470.57.02, 87 %, 79 %, 32510 MiB, 7908 MiB, 24602 MiB
2022/04/28 10:30:32.607, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 65 %, 32510 MiB, 7914 MiB, 24596 MiB
2022/04/28 10:30:32.615, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 85 %, 32510 MiB, 7802 MiB, 24708 MiB
2022/04/28 10:30:32.623, Tesla V100-SXM2-32GB, 470.57.02, 94 %, 84 %, 32510 MiB, 7692 MiB, 24818 MiB
2022/04/28 10:30:32.631, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 85 %, 32510 MiB, 7828 MiB, 24682 MiB
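
The CSV header above matches the fields of an nvidia-smi --query-gpu call. A minimal sketch of collecting the same reading from Python follows; the exact command used by the benchmark's GPU-stat logger is not shown in the log, so this is only an assumption about how such rows are produced:

    import subprocess

    # Query the same fields that appear in the CSV header above.
    # --query-gpu and --format=csv are standard nvidia-smi options.
    FIELDS = ("timestamp,name,driver_version,"
              "utilization.gpu,utilization.memory,"
              "memory.total,memory.free,memory.used")

    def read_gpu_stats() -> list[str]:
        out = subprocess.run(
            ["nvidia-smi", f"--query-gpu={FIELDS}", "--format=csv"],
            capture_output=True, text=True, check=True,
        )
        # First line is the header, remaining lines are one row per GPU.
        return out.stdout.strip().splitlines()

    if __name__ == "__main__":
        for row in read_gpu_stats():
            print(row)
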
[rank:2] [train], epoch: 0/1, iter: 200/312, loss: 0.86726, top1: 0.00109, throughput: 1380.80 | 2022-04-28 10:31:09.432
[rank:6] [train], epoch: 0/1, iter: 200/312, loss: 0.86719, top1: 0.00141, throughput: 1380.82 | 2022-04-28 10:31:09.432
[rank:7] [train], epoch: 0/1, iter: 200/312, loss: 0.86736, top1: 0.00086, throughput: 1380.83 | 2022-04-28 10:31:09.432
[rank:3] [train], epoch: 0/1, iter: 200/312, loss: 0.86708, top1: 0.00133, throughput: 1380.80 | 2022-04-28 10:31:09.432
[rank:5] [train], epoch: 0/1, iter: 200/312, loss: 0.86714, top1: 0.00119, throughput: 1380.80 | 2022-04-28 10:31:09.437
[rank:4] [train], epoch: 0/1, iter: 200/312, loss: 0.86702, top1: 0.00100, throughput: 1380.82 | 2022-04-28 10:31:09.437
[rank:1] [train], epoch: 0/1, iter: 200/312, loss: 0.86733, top1: 0.00115, throughput: 1380.67 | 2022-04-28 10:31:09.437
[rank:0] [train], epoch: 0/1, iter: 200/312, loss: 0.86745, top1: 0.00117, throughput: 1380.02 | 2022-04-28 10:31:09.454
[rank:6] [train], epoch: 0/1, iter: 300/312, loss: 0.86721, top1: 0.00109, throughput: 1337.42 | 2022-04-28 10:31:47.714
[rank:7] [train], epoch: 0/1, iter: 300/312, loss: 0.86730, top1: 0.00115, throughput: 1337.41 | 2022-04-28 10:31:47.715
[rank:5] [train], epoch: 0/1, iter: 300/312, loss: 0.86722, top1: 0.00111, throughput: 1337.54 | 2022-04-28 10:31:47.716
[rank:3] [train], epoch: 0/1, iter: 300/312, loss: 0.86740, top1: 0.00107, throughput: 1337.34 | 2022-04-28 10:31:47.717
[rank:1] [train], epoch: 0/1, iter: 300/312, loss: 0.86737, top1: 0.00098, throughput: 1337.56 | 2022-04-28 10:31:47.715
[rank:2] [train], epoch: 0/1, iter: 300/312, loss: 0.86723, top1: 0.00129, throughput: 1337.41 | 2022-04-28 10:31:47.715
[rank:0] [train], epoch: 0/1, iter: 300/312, loss: 0.86721, top1: 0.00102, throughput: 1338.04 | 2022-04-28 10:31:47.719
[rank:4] [train], epoch: 0/1, iter: 300/312, loss: 0.86725, top1: 0.00123, throughput: 1337.50 | 2022-04-28 10:31:47.717
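
Since metric_local is True, the throughput printed by each rank appears to be a per-rank figure (samples/s); under that assumption, a rough estimate of the whole node's rate at iter 300 is just the sum over the eight ranks:

    # Rough aggregate throughput at iter 300, assuming the printed numbers are per-rank.
    per_rank_throughput = [1337.42, 1337.41, 1337.54, 1337.34,
                           1337.56, 1337.41, 1338.04, 1337.50]   # samples/s, ranks 6,7,5,3,1,2,0,4
    total = sum(per_rank_throughput)
    print(f"approx. node throughput: {total:.0f} samples/s")     # ~10700 samples/s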