loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
*****************************************
Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
*****************************************
------------------------ arguments ------------------------
batches_per_epoch ............................... 625
channel_last .................................... False
ddp ............................................. False
exit_num ........................................ 300
fuse_bn_add_relu ................................ True
fuse_bn_relu .................................... True
gpu_stat_file ................................... None
grad_clipping ................................... 0.0
graph ........................................... True
label_smoothing ................................. 0.1
learning_rate ................................... 2.048
legacy_init ..................................... False
load_path ....................................... None
lr_decay_type ................................... cosine
metric_local .................................... True
metric_train_acc ................................ True
momentum ........................................ 0.875
nccl_fusion_max_ops ............................. 24
nccl_fusion_threshold_mb ........................ 16
num_classes ..................................... 1000
num_devices_per_node ............................ 8
num_epochs ...................................... 1
num_nodes ....................................... 1
ofrecord_part_num ............................... 256
ofrecord_path ................................... /dataset/79846248
print_interval .................................. 100
print_timestamp ................................. False
samples_per_epoch ............................... 1281167
save_init ....................................... False
save_path ....................................... None
scale_grad ...................................... True
skip_eval ....................................... True
synthetic_data .................................. False
total_batches ................................... -1
train_batch_size ................................ 256
train_global_batch_size ......................... 2048
use_fp16 ........................................ False
use_gpu_decode .................................. False
val_batch_size .................................. 50
val_batches_per_epoch ........................... 125
val_global_batch_size ........................... 400
val_samples_per_epoch ........................... 50000
warmup_epochs ................................... 5
weight_decay .................................... 3.0517578125e-05
zero_init_residual .............................. True
-------------------- end of arguments ---------------------
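The batch-size arguments are mutually consistent: 256 samples per device across 8 devices on 1 node gives a global batch of 2048, and 1,281,167 training samples therefore yield 625 full batches per epoch. A minimal arithmetic check (plain Python; all values copied from the dump above):

    # Consistency check for the batch-size arguments printed above.
    num_nodes = 1
    num_devices_per_node = 8
    train_batch_size = 256            # per-device (per-rank) batch size
    samples_per_epoch = 1281167       # ImageNet-1k training set size

    train_global_batch_size = train_batch_size * num_devices_per_node * num_nodes
    batches_per_epoch = samples_per_epoch // train_global_batch_size

    assert train_global_batch_size == 2048   # matches train_global_batch_size above
    assert batches_per_epoch == 625          # matches batches_per_epoch above

    # Validation side: 50 per device * 8 devices = 400 global, 50000 / 400 = 125 batches.
    val_global_batch_size = 50 * num_devices_per_node * num_nodes
    assert val_global_batch_size == 400
    assert 50000 // val_global_batch_size == 125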
***** Model Init *****
***** Model Init Finish, time elapsed: 3.13974 s *****
[rank:6] [train], epoch: 0/1, iter: 100/625, loss: 0.86778, top1: 0.00082, throughput: 274.81 | 2022-05-23 09:51:28.750
[rank:0] [train], epoch: 0/1, iter: 100/625, loss: 0.86758, top1: 0.00133, throughput: 274.80 | 2022-05-23 09:51:28.752
[rank:2] [train], epoch: 0/1, iter: 100/625, loss: 0.86798, top1: 0.00113, throughput: 274.82 | 2022-05-23 09:51:28.751
[rank:7] [train], epoch: 0/1, iter: 100/625, loss: 0.86769, top1: 0.00070, throughput: 274.82 | 2022-05-23 09:51:28.751
[rank:1] [train], epoch: 0/1, iter: 100/625, loss: 0.86791, top1: 0.00113, throughput: 274.81 | 2022-05-23 09:51:28.751
[rank:4] [train], epoch: 0/1, iter: 100/625, loss: 0.86781, top1: 0.00082, throughput: 274.81 | 2022-05-23 09:51:28.753
[rank:5] [train], epoch: 0/1, iter: 100/625, loss: 0.86762, top1: 0.00078, throughput: 274.82 | 2022-05-23 09:51:28.751
[rank:3] [train], epoch: 0/1, iter: 100/625, loss: 0.86767, top1: 0.00129, throughput: 274.81 | 2022-05-23 09:51:28.751
timestamp, name, driver_version, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]
2022/05/23 09:51:29.061, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 61 %, 32510 MiB, 8468 MiB, 24042 MiB
2022/05/23 09:51:29.068, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 60 %, 32510 MiB, 8484 MiB, 24026 MiB
2022/05/23 09:51:29.076, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 58 %, 32510 MiB, 8578 MiB, 23932 MiB
2022/05/23 09:51:29.088, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 61 %, 32510 MiB, 8554 MiB, 23956 MiB
2022/05/23 09:51:29.093, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 58 %, 32510 MiB, 8574 MiB, 23936 MiB
2022/05/23 09:51:29.099, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 62 %, 32510 MiB, 8466 MiB, 24044 MiB
2022/05/23 09:51:29.108, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 62 %, 32510 MiB, 8356 MiB, 24154 MiB
2022/05/23 09:51:29.118, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 57 %, 32510 MiB, 8556 MiB, 23954 MiB
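The CSV block above is the standard nvidia-smi device query format (one header plus one row per GPU, eight V100s here). The log does not show the exact monitoring command, so the following is only an assumed way to produce equivalent output:

    # Hypothetical reproduction of the GPU-stat query above; the actual
    # monitoring command used by the training script is not shown in this log.
    import subprocess

    fields = ("timestamp,name,driver_version,utilization.gpu,utilization.memory,"
              "memory.total,memory.free,memory.used")
    out = subprocess.run(
        ["nvidia-smi", f"--query-gpu={fields}", "--format=csv"],
        capture_output=True, text=True, check=True,
    )
    print(out.stdout)  # header line plus one CSV row per visible GPU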
[rank:1] [train], epoch: 0/1, iter: 200/625, loss: 0.86765, top1: 0.00086, throughput: 378.70 | 2022-05-23 09:52:36.352
[rank:0] [train], epoch: 0/1, iter: 200/625, loss: 0.86765, top1: 0.00098, throughput: 378.69 | 2022-05-23 09:52:36.352
[rank:5] [train], epoch: 0/1, iter: 200/625, loss: 0.86747, top1: 0.00105, throughput: 378.69 | 2022-05-23 09:52:36.353
[rank:3] [train], epoch: 0/1, iter: 200/625, loss: 0.86797, top1: 0.00078, throughput: 378.69 | 2022-05-23 09:52:36.353
[rank:2] [train], epoch: 0/1, iter: 200/625, loss: 0.86756, top1: 0.00066, throughput: 378.69 | 2022-05-23 09:52:36.353
[rank:4] [train], epoch: 0/1, iter: 200/625, loss: 0.86789, top1: 0.00082, throughput: 378.69 | 2022-05-23 09:52:36.354
[rank:7] [train], epoch: 0/1, iter: 200/625, loss: 0.86762, top1: 0.00117, throughput: 378.69 | 2022-05-23 09:52:36.353
[rank:6] [train], epoch: 0/1, iter: 200/625, loss: 0.86790, top1: 0.00113, throughput: 378.68 | 2022-05-23 09:52:36.353
[rank:5] [train], epoch: 0/1, iter: 300/625, loss: 0.86733, top1: 0.00086, throughput: 378.67 | 2022-05-23 09:53:43.959
[rank:0] [train], epoch: 0/1, iter: 300/625, loss: 0.86755, top1: 0.00102, throughput: 378.66 | 2022-05-23 09:53:43.959
[rank:7] [train], epoch: 0/1, iter: 300/625, loss: 0.86740, top1: 0.00074, throughput: 378.66 | 2022-05-23 09:53:43.961
[rank:2] [train], epoch: 0/1, iter: 300/625, loss: 0.86769, top1: 0.00062, throughput: 378.67 | 2022-05-23 09:53:43.958
[rank:1] [train], epoch: 0/1, iter: 300/625, loss: 0.86780, top1: 0.00113, throughput: 378.66 | 2022-05-23 09:53:43.959
[rank:6] [train], epoch: 0/1, iter: 300/625, loss: 0.86778, top1: 0.00098, throughput: 378.66 | 2022-05-23 09:53:43.960
[rank:3] [train], epoch: 0/1, iter: 300/625, loss: 0.86769, top1: 0.00145, throughput: 378.66 | 2022-05-23 09:53:43.960
[rank:4] [train], epoch: 0/1, iter: 300/625, loss: 0.86782, top1: 0.00113, throughput: 378.66 | 2022-05-23 09:53:43.961
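The reported throughput is samples per second per device, and it agrees with the wall-clock timestamps: each rank processes print_interval x train_batch_size = 100 x 256 = 25,600 samples between reports, and the gap from the iter-100 to the iter-200 report is about 67.6 s, i.e. roughly 379 samples/s. A back-of-the-envelope check (hypothetical helper, not part of the training script):

    from datetime import datetime

    # Check that the reported per-rank throughput (samples/s per device)
    # matches the printed wall-clock timestamps.
    def throughput(ts_prev: str, ts_curr: str,
                   print_interval: int = 100, batch_size: int = 256) -> float:
        fmt = "%Y-%m-%d %H:%M:%S.%f"
        elapsed = (datetime.strptime(ts_curr, fmt)
                   - datetime.strptime(ts_prev, fmt)).total_seconds()
        return print_interval * batch_size / elapsed

    # rank 0, iter 100 -> iter 200 (timestamps copied from the log above)
    print(throughput("2022-05-23 09:51:28.752", "2022-05-23 09:52:36.352"))
    # ~378.7, consistent with the reported throughput of 378.69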