loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
*****************************************
Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
*****************************************
------------------------ arguments ------------------------
batches_per_epoch ............................... 312
channel_last .................................... True
ddp ............................................. False
exit_num ........................................ 300
fuse_bn_add_relu ................................ True
fuse_bn_relu .................................... True
gpu_stat_file ................................... None
grad_clipping ................................... 0.0
graph ........................................... True
label_smoothing ................................. 0.1
learning_rate ................................... 4.096
legacy_init ..................................... False
load_path ....................................... None
lr_decay_type ................................... cosine
metric_local .................................... True
metric_train_acc ................................ True
momentum ........................................ 0.875
nccl_fusion_max_ops ............................. 24
nccl_fusion_threshold_mb ........................ 16
num_classes ..................................... 1000
num_devices_per_node ............................ 8
num_epochs ...................................... 1
num_nodes ....................................... 1
ofrecord_part_num ............................... 256
ofrecord_path ................................... /dataset/79846248
print_interval .................................. 100
print_timestamp ................................. False
samples_per_epoch ............................... 1281167
save_init ....................................... False
save_path ....................................... None
scale_grad ...................................... True
skip_eval ....................................... True
synthetic_data .................................. False
total_batches ................................... -1
train_batch_size ................................ 512
train_global_batch_size ......................... 4096
use_fp16 ........................................ True
use_gpu_decode .................................. True
val_batch_size .................................. 50
val_batches_per_epoch ........................... 125
val_global_batch_size ........................... 400
val_samples_per_epoch ........................... 50000
warmup_epochs ................................... 5
weight_decay .................................... 3.0517578125e-05
zero_init_residual .............................. True
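Note: the batch-size arguments above are mutually consistent: train_global_batch_size 4096 = train_batch_size 512 x num_devices_per_node 8 x num_nodes 1; val_global_batch_size 400 = val_batch_size 50 x 8; and batches_per_epoch 312 = floor(samples_per_epoch 1281167 / 4096).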
-------------------- end of arguments ---------------------
***** Model Init *****
***** Model Init Finish, time elapsed: 3.00766 s *****
[rank:3] [train], epoch: 0/1, iter: 100/312, loss: 0.86727, top1: 0.00096, throughput: 424.63 | 2022-04-30 02:29:54.518
[rank:5] [train], epoch: 0/1, iter: 100/312, loss: 0.86733, top1: 0.00096, throughput: 424.64 | 2022-04-30 02:29:54.517
[rank:7] [train], epoch: 0/1, iter: 100/312, loss: 0.86725, top1: 0.00092, throughput: 424.63 | 2022-04-30 02:29:54.518
[rank:2] [train], epoch: 0/1, iter: 100/312, loss: 0.86733, top1: 0.00078, throughput: 424.64 | 2022-04-30 02:29:54.519
[rank:1] [train], epoch: 0/1, iter: 100/312, loss: 0.86737, top1: 0.00104, throughput: 424.64 | 2022-04-30 02:29:54.517
[rank:4] [train], epoch: 0/1, iter: 100/312, loss: 0.86765, top1: 0.00096, throughput: 424.62 | 2022-04-30 02:29:54.519
[rank:0] [train], epoch: 0/1, iter: 100/312, loss: 0.86739, top1: 0.00094, throughput: 424.65 | 2022-04-30 02:29:54.521
[rank:6] [train], epoch: 0/1, iter: 100/312, loss: 0.86742, top1: 0.00092, throughput: 424.62 | 2022-04-30 02:29:54.519
timestamp, name, driver_version, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]
2022/04/30 02:29:54.763, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 63 %, 32510 MiB, 7773 MiB, 24737 MiB
2022/04/30 02:29:54.769, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 62 %, 32510 MiB, 7724 MiB, 24786 MiB
2022/04/30 02:29:54.774, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 72 %, 32510 MiB, 7956 MiB, 24554 MiB
2022/04/30 02:29:54.781, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 74 %, 32510 MiB, 7912 MiB, 24598 MiB
2022/04/30 02:29:54.789, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 87 %, 32510 MiB, 7914 MiB, 24596 MiB
2022/04/30 02:29:54.797, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 72 %, 32510 MiB, 7808 MiB, 24702 MiB
2022/04/30 02:29:54.805, Tesla V100-SXM2-32GB, 470.57.02, 89 %, 81 %, 32510 MiB, 7670 MiB, 24840 MiB
2022/04/30 02:29:54.813, Tesla V100-SXM2-32GB, 470.57.02, 82 %, 73 %, 32510 MiB, 7822 MiB, 24688 MiB
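The GPU stats above follow the standard nvidia-smi CSV query format (one row per device, deduplicated here across the concurrent per-rank samplers). An invocation along these lines reproduces those columns; the exact command used is not recorded in the log:
nvidia-smi --query-gpu=timestamp,name,driver_version,utilization.gpu,utilization.memory,memory.total,memory.free,memory.used --format=csv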
[rank:6] [train], epoch: 0/1, iter: 200/312, loss: 0.86744, top1: 0.00074, throughput: 1318.91 | 2022-04-30 02:30:33.339
[rank:1] [train], epoch: 0/1, iter: 200/312, loss: 0.86729, top1: 0.00088, throughput: 1318.89 | 2022-04-30 02:30:33.338
[rank:3] [train], epoch: 0/1, iter: 200/312, loss: 0.86738, top1: 0.00086, throughput: 1318.93 | 2022-04-30 02:30:33.337
[rank:2] [train], epoch: 0/1, iter: 200/312, loss: 0.86755, top1: 0.00062, throughput: 1318.91 | 2022-04-30 02:30:33.339
[rank:7] [train], epoch: 0/1, iter: 200/312, loss: 0.86748, top1: 0.00080, throughput: 1318.73 | 2022-04-30 02:30:33.343
[rank:0] [train], epoch: 0/1, iter: 200/312, loss: 0.86719, top1: 0.00105, throughput: 1318.86 | 2022-04-30 02:30:33.342
[rank:4] [train], epoch: 0/1, iter: 200/312, loss: 0.86753, top1: 0.00082, throughput: 1318.74 | 2022-04-30 02:30:33.344
[rank:5] [train], epoch: 0/1, iter: 200/312, loss: 0.86755, top1: 0.00074, throughput: 1318.48 | 2022-04-30 02:30:33.350
[rank:3] [train], epoch: 0/1, iter: 300/312, loss: 0.86756, top1: 0.00105, throughput: 1382.26 | 2022-04-30 02:31:10.378
[rank:1] [train], epoch: 0/1, iter: 300/312, loss: 0.86723, top1: 0.00078, throughput: 1382.27 | 2022-04-30 02:31:10.378
[rank:0] [train], epoch: 0/1, iter: 300/312, loss: 0.86773, top1: 0.00061, throughput: 1382.33 | 2022-04-30 02:31:10.381
[rank:5] [train], epoch: 0/1, iter: 300/312, loss: 0.86710, top1: 0.00094, throughput: 1382.74 | 2022-04-30 02:31:10.378
[rank:7] [train], epoch: 0/1, iter: 300/312, loss: 0.86737, top1: 0.00096, throughput: 1382.44 | 2022-04-30 02:31:10.379
[rank:4] [train], epoch: 0/1, iter: 300/312, loss: 0.86744, top1: 0.00080, throughput: 1382.45 | 2022-04-30 02:31:10.380
[rank:2] [train], epoch: 0/1, iter: 300/312, loss: 0.86774, top1: 0.00082, throughput: 1382.20 | 2022-04-30 02:31:10.381
[rank:6] [train], epoch: 0/1, iter: 300/312, loss: 0.86746, top1: 0.00080, throughput: 1382.27 | 2022-04-30 02:31:10.379