loaded library: /usr/lib/x86_64-linux-gnu/libibverbs.so.1
*****************************************
Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
*****************************************
------------------------ arguments ------------------------
batches_per_epoch ............................... 625
channel_last .................................... False
ddp ............................................. False
exit_num ........................................ 300
fuse_bn_add_relu ................................ True
fuse_bn_relu .................................... True
gpu_stat_file ................................... None
grad_clipping ................................... 0.0
graph ........................................... True
label_smoothing ................................. 0.1
learning_rate ................................... 2.048
legacy_init ..................................... False
load_path ....................................... None
lr_decay_type ................................... cosine
metric_local .................................... True
metric_train_acc ................................ True
momentum ........................................ 0.875
nccl_fusion_max_ops ............................. 24
nccl_fusion_threshold_mb ........................ 16
num_classes ..................................... 1000
num_devices_per_node ............................ 8
num_epochs ...................................... 1
num_nodes ....................................... 1
ofrecord_part_num ............................... 256
ofrecord_path ................................... /dataset/79846248
print_interval .................................. 100
print_timestamp ................................. False
samples_per_epoch ............................... 1281167
save_init ....................................... False
save_path ....................................... None
scale_grad ...................................... True
skip_eval ....................................... True
synthetic_data .................................. False
total_batches ................................... -1
train_batch_size ................................ 256
train_global_batch_size ......................... 2048
use_fp16 ........................................ False
use_gpu_decode .................................. False
val_batch_size .................................. 50
val_batches_per_epoch ........................... 125
val_global_batch_size ........................... 400
val_samples_per_epoch ........................... 50000
warmup_epochs ................................... 5
weight_decay .................................... 3.0517578125e-05
zero_init_residual .............................. True
-------------------- end of arguments ---------------------
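The per-device and global batch settings in the argument dump are mutually consistent. Below is a minimal consistency check; the values are copied from the dump above, while the relations themselves are the usual data-parallel ones and are an assumption about how the script derives them, not something shown in this log.

# Batch-size arithmetic implied by the arguments above (Python sketch).
train_batch_size = 256          # per-device batch size
num_devices_per_node = 8
num_nodes = 1
samples_per_epoch = 1281167     # ImageNet train-set size
val_batch_size = 50
val_samples_per_epoch = 50000

train_global_batch_size = train_batch_size * num_devices_per_node * num_nodes
assert train_global_batch_size == 2048

batches_per_epoch = samples_per_epoch // train_global_batch_size
assert batches_per_epoch == 625

val_global_batch_size = val_batch_size * num_devices_per_node * num_nodes
assert val_global_batch_size == 400

val_batches_per_epoch = val_samples_per_epoch // val_global_batch_size
assert val_batches_per_epoch == 125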
***** Model Init *****
***** Model Init Finish, time escapled: 3.00094 s *****
[rank:3] [train], epoch: 0/1, iter: 100/625, loss: 0.86750, top1: 0.00051, throughput: 276.49 | 2022-04-27 10:49:27.554
[rank:0] [train], epoch: 0/1, iter: 100/625, loss: 0.86732, top1: 0.00074, throughput: 276.49 | 2022-04-27 10:49:27.556
[rank:5] [train], epoch: 0/1, iter: 100/625, loss: 0.86736, top1: 0.00062, throughput: 276.50 | 2022-04-27 10:49:27.554
[rank:2] [train], epoch: 0/1, iter: 100/625, loss: 0.86750, top1: 0.00074, throughput: 276.48 | 2022-04-27 10:49:27.553
[rank:1] [train], epoch: 0/1, iter: 100/625, loss: 0.86738, top1: 0.00070, throughput: 276.49 | 2022-04-27 10:49:27.554
[rank:7] [train], epoch: 0/1, iter: 100/625, loss: 0.86757, top1: 0.00086, throughput: 276.47 | 2022-04-27 10:49:27.555
[rank:4] [train], epoch: 0/1, iter: 100/625, loss: 0.86747, top1: 0.00047, throughput: 276.47 | 2022-04-27 10:49:27.557
[rank:6] [train], epoch: 0/1, iter: 100/625, loss: 0.86744, top1: 0.00051, throughput: 276.48 | 2022-04-27 10:49:27.553
timestamp, name, driver_version, utilization.gpu [%], utilization.memory [%], memory.total [MiB], memory.free [MiB], memory.used [MiB]
2022/04/27 10:49:27.883, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 59 %, 32510 MiB, 8472 MiB, 24038 MiB
2022/04/27 10:49:27.892, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 58 %, 32510 MiB, 8474 MiB, 24036 MiB
2022/04/27 10:49:27.899, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 58 %, 32510 MiB, 8662 MiB, 23848 MiB
2022/04/27 10:49:27.907, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 63 %, 32510 MiB, 8566 MiB, 23944 MiB
2022/04/27 10:49:27.918, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 62 %, 32510 MiB, 8634 MiB, 23876 MiB
2022/04/27 10:49:27.928, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 58 %, 32510 MiB, 8418 MiB, 24092 MiB
2022/04/27 10:49:27.937, Tesla V100-SXM2-32GB, 470.57.02, 100 %, 63 %, 32510 MiB, 8362 MiB, 24148 MiB
2022/04/27 10:49:27.948, Tesla V100-SXM2-32GB, 470.57.02, 99 %, 62 %, 32510 MiB, 8494 MiB, 24016 MiB
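The GPU-stat header above is what nvidia-smi emits for a CSV query over these fields. Below is a sketch of how one such snapshot could be collected; the assumption that the training script gathers stats by shelling out to nvidia-smi is mine, since the log does not show the mechanism, only the output.

# Python sketch: reproduce the GPU-stat snapshot format seen above.
import subprocess

def query_gpu_stats():
    """Return nvidia-smi CSV lines matching the header printed above."""
    fields = ",".join([
        "timestamp", "name", "driver_version",
        "utilization.gpu", "utilization.memory",
        "memory.total", "memory.free", "memory.used",
    ])
    out = subprocess.run(
        ["nvidia-smi", f"--query-gpu={fields}", "--format=csv"],
        capture_output=True, text=True, check=True,
    )
    return out.stdout.splitlines()  # header + one row per visible GPU

# Because all 8 ranks run the same query at roughly the same moment, the raw
# log contains 8 interleaved copies of the header and of each GPU's row; the
# table above keeps one header and the first reading seen for each GPU.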
[rank:2] [train], epoch: 0/1, iter: 200/625, loss: 0.86793, top1: 0.00066, throughput: 379.16 | 2022-04-27 10:50:35.071
[rank:6] [train], epoch: 0/1, iter: 200/625, loss: 0.86750, top1: 0.00074, throughput: 379.15 | 2022-04-27 10:50:35.071
[rank:3] [train], epoch: 0/1, iter: 200/625, loss: 0.86746, top1: 0.00098, throughput: 379.16 | 2022-04-27 10:50:35.072
[rank:0] [train], epoch: 0/1, iter: 200/625, loss: 0.86772, top1: 0.00047, throughput: 379.17 | 2022-04-27 10:50:35.072
[rank:1] [train], epoch: 0/1, iter: 200/625, loss: 0.86738, top1: 0.00082, throughput: 379.16 | 2022-04-27 10:50:35.071
[rank:5] [train], epoch: 0/1, iter: 200/625, loss: 0.86750, top1: 0.00066, throughput: 379.16 | 2022-04-27 10:50:35.071
[rank:4] [train], epoch: 0/1, iter: 200/625, loss: 0.86760, top1: 0.00074, throughput: 379.16 | 2022-04-27 10:50:35.075
[rank:7] [train], epoch: 0/1, iter: 200/625, loss: 0.86732, top1: 0.00047, throughput: 379.16 | 2022-04-27 10:50:35.072
[rank:2] [train], epoch: 0/1, iter: 300/625, loss: 0.86776, top1: 0.00055, throughput: 377.89 | 2022-04-27 10:51:42.816
[rank:6] [train], epoch: 0/1, iter: 300/625, loss: 0.86749, top1: 0.00074, throughput: 377.89 | 2022-04-27 10:51:42.817
[rank:5] [train], epoch: 0/1, iter: 300/625, loss: 0.86781, top1: 0.00059, throughput: 377.88 | 2022-04-27 10:51:42.817
[rank:1] [train], epoch: 0/1, iter: 300/625, loss: 0.86755, top1: 0.00086, throughput: 377.89 | 2022-04-27 10:51:42.816
[rank:3] [train], epoch: 0/1, iter: 300/625, loss: 0.86781, top1: 0.00066, throughput: 377.89 | 2022-04-27 10:51:42.817
[rank:4] [train], epoch: 0/1, iter: 300/625, loss: 0.86728, top1: 0.00062, throughput: 377.89 | 2022-04-27 10:51:42.820
[rank:0] [train], epoch: 0/1, iter: 300/625, loss: 0.86769, top1: 0.00055, throughput: 377.88 | 2022-04-27 10:51:42.818
[rank:7] [train], epoch: 0/1, iter: 300/625, loss: 0.86776, top1: 0.00062, throughput: 377.88 | 2022-04-27 10:51:42.818
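The per-rank training lines above follow one fixed layout (rank, epoch, iter, loss, top1, throughput, timestamp), so loss and throughput can be aggregated across ranks directly. A minimal, illustrative parser follows; the regex and the helper name mean_throughput are assumptions made for illustration, not part of the training script.

# Python sketch: parse the "[rank:N] [train]" lines and average throughput.
import re

LINE_RE = re.compile(
    r"\[rank:(\d+)\] \[train\], epoch: (\d+)/(\d+), iter: (\d+)/(\d+), "
    r"loss: ([\d.]+), top1: ([\d.]+), throughput: ([\d.]+)"
)

def mean_throughput(lines, iteration):
    """Average the per-rank throughput reported at a given iteration."""
    vals = []
    for line in lines:
        m = LINE_RE.search(line)
        if m and int(m.group(4)) == iteration:
            vals.append(float(m.group(8)))
    return sum(vals) / len(vals) if vals else 0.0

# e.g. at iter 300 each of the eight ranks reports roughly 377.9 samples/s.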