loop timing

Author: Varuna Jayasiri
Date:   2021-01-23 16:07:24 +05:30
Parent: ebbe704d65
Commit: 6da6074573

2 changed files with 4 additions and 2 deletions


@@ -493,7 +493,7 @@ class Configs(BaseConfigs):
         # Loop through epochs
         for epoch in monit.loop(self.epochs):
             # Loop through the dataset
-            for i, batch in enumerate(self.dataloader):
+            for i, batch in monit.enum('Train', self.dataloader):
                 # Move images to the device
                 data_x, data_y = batch['x'].to(self.device), batch['y'].to(self.device)
@@ -528,6 +528,8 @@ class Configs(BaseConfigs):
             # Update learning rates
             self.generator_lr_scheduler.step()
             self.discriminator_lr_scheduler.step()
+            # New line
+            tracker.new_line()

     def optimize_generators(self, data_x: torch.Tensor, data_y: torch.Tensor, true_labels: torch.Tensor):
         """

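For reference, here is a minimal sketch of how the labml monit and tracker calls added above time a training loop. The dataloader, batch shapes, and epoch count below are placeholders for illustration, not the repository's actual data pipeline:

import torch
from labml import monit, tracker

# Placeholder data: small batches shaped like the {'x': ..., 'y': ...} dicts in the diff above
dataloader = [{'x': torch.randn(4, 3, 32, 32), 'y': torch.randn(4, 3, 32, 32)} for _ in range(8)]
epochs = 2

for epoch in monit.loop(epochs):
    # monit.enum wraps the iterable and reports per-iteration progress and timing under the 'Train' label
    for i, batch in monit.enum('Train', dataloader):
        data_x, data_y = batch['x'], batch['y']
        # forward/backward passes and optimizer steps would go here
    # start a fresh console line after each epoch's progress output
    tracker.new_line()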

@@ -5,7 +5,7 @@ with open("readme.md", "r") as f:
 setuptools.setup(
     name='labml-nn',
-    version='0.4.80',
+    version='0.4.81',
     author="Varuna Jayasiri, Nipun Wijerathne",
     author_email="vpjayasiri@gmail.com, hnipun@gmail.com",
     description="A collection of PyTorch implementations of neural network architectures and layers.",