# NOTE: this method assumes module-level imports (datetime, time, torch)
# and a get_event_storage() helper are in scope.
def write(self):
    storage = get_event_storage()
    iteration = storage.iter

    try:
        data_time = storage.history("data_time").avg(20)
    except KeyError:
        # data_time may not exist in the first few iterations (due to warmup)
        # or when SimpleTrainer is not used
        data_time = None

    eta_string = None
    try:
        iter_time = storage.history("time").global_avg()
        eta_seconds = storage.history("time").median(1000) * (self._max_iter - iteration)
        storage.put_scalar("eta_seconds", eta_seconds, smoothing_hint=False)
        eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
    except KeyError:
        iter_time = None
        # no "time" history yet; estimate the ETA from wall-clock time since
        # the last write instead (noisier, but available immediately)
        if self._last_write is not None:
            estimate_iter_time = (time.perf_counter() - self._last_write[1]) / (
                iteration - self._last_write[0]
            )
            eta_seconds = estimate_iter_time * (self._max_iter - iteration)
            eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
        self._last_write = (iteration, time.perf_counter())

    try:
        lr = "{:.2e}".format(storage.history("lr").latest())
    except KeyError:
        lr = "N/A"

    if torch.cuda.is_available():
        # peak GPU memory allocated so far, in MiB
        max_mem_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0
    else:
        max_mem_mb = None

    self.logger.info(
        " {eta}iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format(
            eta=f"eta: {eta_string} " if eta_string else "",
            iter=iteration,
            losses=" ".join(
                [
                    "{}: {:.4g}".format(k, v.median(20))
                    for k, v in storage.histories().items()
                    if "loss" in k
                ]
            ),
            time="time: {:.4f} ".format(iter_time) if iter_time is not None else "",
            data_time="data_time: {:.4f} ".format(data_time) if data_time is not None else "",
            lr=lr,
            memory="max_mem: {:.0f}M".format(max_mem_mb) if max_mem_mb is not None else "",
        )
    )
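

# ---------------------------------------------------------------------------
# A minimal usage sketch (not part of the original file), assuming detectron2
# is installed: a toy loop logs the scalars that write() reads back ("time",
# "data_time", "lr", and any key containing "loss"). The loop body and the
# scalar values below are placeholders, not real training code.
if __name__ == "__main__":
    import time

    from detectron2.utils.events import CommonMetricPrinter, EventStorage
    from detectron2.utils.logger import setup_logger

    setup_logger()  # route detectron2's loggers to stdout

    max_iter = 100
    printer = CommonMetricPrinter(max_iter)

    with EventStorage(start_iter=0) as storage:
        for _ in range(max_iter):
            step_start = time.perf_counter()
            # ... one real training step would go here ...
            storage.put_scalar("time", time.perf_counter() - step_start)
            storage.put_scalar("data_time", 0.01)  # placeholder value
            storage.put_scalar("lr", 0.02, smoothing_hint=False)
            storage.put_scalar("total_loss", 0.5)  # placeholder value
            if (storage.iter + 1) % 20 == 0:
                printer.write()
            storage.step()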