fix: deadlock if processes have different log_buffer (#252)

pull/254/head^2
Harry 2020-04-27 22:44:26 +08:00 committed by GitHub
parent 0c34ab90d0
commit c203419f57
1 changed file with 5 additions and 2 deletions

@@ -98,13 +98,16 @@ class TextLoggerHook(LoggerHook):
         log_dict['iter'] = runner.inner_iter + 1
         # only record lr of the first param group
         log_dict['lr'] = runner.current_lr()[0]
+        memory = None
+        if torch.cuda.is_available():
+            memory = self._get_max_memory(runner)
         if mode == 'train':
             log_dict['time'] = runner.log_buffer.output['time']
             log_dict['data_time'] = runner.log_buffer.output['data_time']
             # statistic memory
-            if torch.cuda.is_available():
-                log_dict['memory'] = self._get_max_memory(runner)
+            if memory is not None:
+                log_dict['memory'] = memory
         for name, val in runner.log_buffer.output.items():
             if name in ['time', 'data_time']:
                 continue
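
Why this fixes the deadlock: _get_max_memory() performs a distributed reduce, a collective that every rank must join, while `mode` is derived from the contents of runner.log_buffer.output. When processes have different log_buffer contents, some ranks take the 'train' branch and call the collective while others do not, and the calling ranks block forever. The patch hoists the collective out of the buffer-dependent branch so all ranks always join it. Below is a minimal, self-contained sketch of that pattern, not mmcv's actual code; gather_max_memory, worker and should_log are made-up names, and the gloo backend is used so it runs on CPU.

# Illustrative sketch only: reproduces the "collective behind a rank-dependent
# guard" bug and the hoisted-collective fix applied in this commit.
import os

import torch
import torch.distributed as dist
import torch.multiprocessing as mp


def gather_max_memory(rank):
    # Stand-in for TextLoggerHook._get_max_memory: reduce a per-rank value onto
    # rank 0 with MAX. dist.reduce is a collective, so every rank must call it.
    value = torch.tensor([float(rank + 1) * 100.0])
    dist.reduce(value, dst=0, op=dist.ReduceOp.MAX)
    return value.item()


def worker(rank, world_size, hoist_collective):
    os.environ['MASTER_ADDR'] = '127.0.0.1'
    os.environ['MASTER_PORT'] = '29501'
    dist.init_process_group('gloo', rank=rank, world_size=world_size)

    # Simulates "processes have different log_buffer": the guard evaluates
    # differently on different ranks (here, only rank 0 takes the branch).
    should_log = rank == 0

    if hoist_collective:
        # Fixed pattern (what the commit does): all ranks join the collective;
        # only the rank-dependent logging stays behind the guard.
        memory = gather_max_memory(rank)
        if should_log:
            print(f'[rank {rank}] max memory: {memory}')
    else:
        # Buggy pattern: the collective sits behind the rank-dependent guard,
        # so rank 0 blocks in dist.reduce waiting for peers that never arrive
        # (or errors out once they exit).
        if should_log:
            memory = gather_max_memory(rank)
            print(f'[rank {rank}] max memory: {memory}')

    dist.destroy_process_group()


if __name__ == '__main__':
    # Pass hoist_collective=False to reproduce the hang; with True both ranks
    # finish and rank 0 prints the reduced maximum.
    mp.spawn(worker, args=(2, True), nprocs=2)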