imyhxy and fkwong committed on
Commit 771ac6c
Parent: 2d99063

Fixed logging level in distributed mode (#4284)


Co-authored-by: fkwong <[email protected]>

Files changed (1)
  1. utils/torch_utils.py +1 -1
utils/torch_utils.py CHANGED

@@ -23,7 +23,6 @@ try:
 except ImportError:
     thop = None
 
-logging.basicConfig(format="%(message)s", level=logging.INFO)
 LOGGER = logging.getLogger(__name__)
 
 
@@ -108,6 +107,7 @@ def profile(input, ops, n=10, device=None):
     # profile(input, [m1, m2], n=100)  # profile over 100 iterations
 
     results = []
+    logging.basicConfig(format="%(message)s", level=logging.INFO)
     device = device or select_device()
     print(f"{'Params':>12s}{'GFLOPs':>12s}{'GPU_mem (GB)':>14s}{'forward (ms)':>14s}{'backward (ms)':>14s}"
           f"{'input':>24s}{'output':>24s}")