loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
*****************************************
Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
*****************************************
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
------------------------ arguments ------------------------
batches_per_epoch ............................... 312
channel_last .................................... True
ddp ............................................. False
exit_num ........................................ 300
fuse_bn_add_relu ................................ True
fuse_bn_relu .................................... True
gpu_stat_file ................................... None
grad_clipping ................................... 0.0
graph ........................................... True
label_smoothing ................................. 0.1
learning_rate ................................... 4.096
legacy_init ..................................... False
load_path ....................................... None
lr_decay_type ................................... cosine
metric_local .................................... True
metric_train_acc ................................ True
momentum ........................................ 0.875
nccl_fusion_max_ops ............................. 24
nccl_fusion_threshold_mb ........................ 16
num_classes ..................................... 1000
num_devices_per_node ............................ 8
num_epochs ...................................... 1
num_nodes ....................................... 1
ofrecord_part_num ............................... 256
ofrecord_path ................................... /dataset/79846248
print_interval .................................. 100
print_timestamp ................................. False
samples_per_epoch ............................... 1281167
save_init ....................................... False
save_path ....................................... None
scale_grad ...................................... True
skip_eval ....................................... True
synthetic_data .................................. False
total_batches ................................... -1
train_batch_size ................................ 512
train_global_batch_size ......................... 4096
use_fp16 ........................................ True
use_gpu_decode .................................. True
val_batch_size .................................. 50
val_batches_per_epoch ........................... 125
val_global_batch_size ........................... 400
val_samples_per_epoch ........................... 50000
warmup_epochs ................................... 5
weight_decay .................................... 3.0517578125e-05
zero_init_residual .............................. True
-------------------- end of arguments ---------------------
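The derived batch and iteration counts in the argument dump are consistent with each other. A minimal sketch in plain Python (illustrative only, not part of the training script; the variable names simply mirror the printed argument names) that reproduces them:

# All values are taken from the argument dump above; variable names are
# illustrative and only mirror the printed argument names.
train_batch_size = 512            # per-device batch size
num_devices_per_node = 8
num_nodes = 1
samples_per_epoch = 1281167       # ImageNet-1k training set size
val_samples_per_epoch = 50000
val_batch_size = 50

train_global_batch_size = train_batch_size * num_devices_per_node * num_nodes
batches_per_epoch = samples_per_epoch // train_global_batch_size
val_global_batch_size = val_batch_size * num_devices_per_node * num_nodes
val_batches_per_epoch = val_samples_per_epoch // val_global_batch_size

print(train_global_batch_size)    # 4096, matches train_global_batch_size
print(batches_per_epoch)          # 312,  matches batches_per_epoch
print(val_global_batch_size)      # 400,  matches val_global_batch_size
print(val_batches_per_epoch)      # 125,  matches val_batches_per_epoch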
***** Model Init *****
***** Model Init Finish, time escapled: 2.89338 s *****
[rank:0] [train], epoch: 0/1, iter: 100/312, loss: 0.86730, top1: 0.00094, throughput: 425.40 | 2022-05-12 02:03:35.252
[rank:7] [train], epoch: 0/1, iter: 100/312, loss: 0.86759, top1: 0.00098, throughput: 425.39 | 2022-05-12 02:03:35.252
[rank:3] [train], epoch: 0/1, iter: 100/312, loss: 0.86726, top1: 0.00105, throughput: 425.39 | 2022-05-12 02:03:35.252
[rank:2] [train], epoch: 0/1, iter: 100/312, loss: 0.86734, top1: 0.00115, throughput: 425.41 | 2022-05-12 02:03:35.252
[rank:1] [train], epoch: 0/1, iter: 100/312, loss: 0.86757, top1: 0.00098, throughput: 425.43 | 2022-05-12 02:03:35.252
[rank:6] [train], epoch: 0/1, iter: 100/312, loss: 0.86730, top1: 0.00094, throughput: 425.41 | 2022-05-12 02:03:35.252
[rank:5] [train], epoch: 0/1, iter: 100/312, loss: 0.86722, top1: 0.00119, throughput: 425.39 | 2022-05-12 02:03:35.255
[rank:4] [train], epoch: 0/1, iter: 100/312, loss: 0.86749, top1: 0.00104, throughput: 425.43 | 2022-05-12 02:03:35.253
timestamp, name, driver_version, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]
2022/05/12 02:03:35.501, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 62 %, 32510 MiB, 7781 MiB, 24729 MiB
2022/05/12 02:03:35.505, Tesla V100-SXM2-32GB, 470.57.02, 98 %, 86 %, 32510 MiB, 7778 MiB, 24732 MiB
2022/05/12 02:03:35.510, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 64 %, 32510 MiB, 7948 MiB, 24562 MiB
2022/05/12 02:03:35.518, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 87 %, 32510 MiB, 7884 MiB, 24626 MiB
2022/05/12 02:03:35.526, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 88 %, 32510 MiB, 7882 MiB, 24628 MiB
2022/05/12 02:03:35.533, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 70 %, 32510 MiB, 7776 MiB, 24734 MiB
2022/05/12 02:03:35.541, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 84 %, 32510 MiB, 7692 MiB, 24818 MiB
2022/05/12 02:03:35.549, Tesla V100-SXM2-32GB, 470.57.02, 95 %, 85 %, 32510 MiB, 7832 MiB, 24678 MiB
[rank:2] [train], epoch: 0/1, iter: 200/312, loss: 0.86767, top1: 0.00100, throughput: 1343.07 | 2022-05-12 02:04:13.374
[rank:6] [train], epoch: 0/1, iter: 200/312, loss: 0.86752, top1: 0.00094, throughput: 1343.03 | 2022-05-12 02:04:13.375
[rank:4] [train], epoch: 0/1, iter: 200/312, loss: 0.86747, top1: 0.00090, throughput: 1343.04 | 2022-05-12 02:04:13.376
[rank:3] [train], epoch: 0/1, iter: 200/312, loss: 0.86750, top1: 0.00098, throughput: 1343.03 | 2022-05-12 02:04:13.375
[rank:5] [train], epoch: 0/1, iter: 200/312, loss: 0.86746, top1: 0.00117, throughput: 1343.10 | 2022-05-12 02:04:13.376
[rank:1] [train], epoch: 0/1, iter: 200/312, loss: 0.86768, top1: 0.00082, throughput: 1343.03 | 2022-05-12 02:04:13.375
[rank:7] [train], epoch: 0/1, iter: 200/312, loss: 0.86746, top1: 0.00127, throughput: 1342.99 | 2022-05-12 02:04:13.376
[rank:0] [train], epoch: 0/1, iter: 200/312, loss: 0.86771, top1: 0.00092, throughput: 1342.93 | 2022-05-12 02:04:13.378
[rank:2] [train], epoch: 0/1, iter: 300/312, loss: 0.86758, top1: 0.00096, throughput: 1359.83 | 2022-05-12 02:04:51.025
[rank:5] [train], epoch: 0/1, iter: 300/312, loss: 0.86756, top1: 0.00074, throughput: 1359.87 | 2022-05-12 02:04:51.026
[rank:0] [train], epoch: 0/1, iter: 300/312, loss: 0.86780, top1: 0.00092, throughput: 1359.89 | 2022-05-12 02:04:51.028
[rank:7] [train], epoch: 0/1, iter: 300/312, loss: 0.86738, top1: 0.00109, throughput: 1359.87 | 2022-05-12 02:04:51.027
[rank:6] [train], epoch: 0/1, iter: 300/312, loss: 0.86747, top1: 0.00096, throughput: 1359.79 | 2022-05-12 02:04:51.028
[rank:1] [train], epoch: 0/1, iter: 300/312, loss: 0.86738, top1: 0.00068, throughput: 1359.83 | 2022-05-12 02:04:51.027
[rank:3] [train], epoch: 0/1, iter: 300/312, loss: 0.86757, top1: 0.00094, throughput: 1359.78 | 2022-05-12 02:04:51.028
[rank:4] [train], epoch: 0/1, iter: 300/312, loss: 0.86763, top1: 0.00090, throughput: 1355.41 | 2022-05-12 02:04:51.151
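The throughput figures above are per-rank samples/s: each rank processes its local batch of 512, and the roughly 37.7 s between the iter-200 and iter-300 prints gives about 0.377 s per iteration, i.e. 512 / 0.377 which is about 1360. Total throughput is therefore approximately the sum over the 8 ranks, a little under 10,900 samples/s at iteration 300. A minimal parsing sketch, assuming a log file laid out exactly like the lines above; the file name train.log and the regex are illustrative assumptions, not part of the benchmark script:

# Illustrative only: sums the last reported per-rank throughput from a log
# shaped like the one above. "train.log" is an assumed file name.
import re

pattern = re.compile(r"\[rank:(\d+)\] \[train\].*throughput: ([0-9.]+)")
last = {}  # rank -> most recently printed throughput (samples/s on that rank)
with open("train.log") as f:
    for line in f:
        m = pattern.search(line)
        if m:
            last[int(m.group(1))] = float(m.group(2))

total = sum(last.values())
print(f"{len(last)} ranks, aggregate throughput ~ {total:.1f} samples/s")
# With the iter-300 values above this comes to roughly 10874 samples/s.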