experiment logs

Varuna Jayasiri
2022-05-03 09:12:18 +01:00
parent 4d4cbefe3e
commit aa311eb30d
8 changed files with 613 additions and 499 deletions


@@ -74,6 +74,16 @@ class NLPClassificationConfigs(TrainValidConfigs):
     # Validation data loader
     valid_loader: DataLoader = 'ag_news'
 
+    # Whether to log model parameters and gradients (once per epoch).
+    # These are summarized stats per layer, but it could still lead
+    # to many indicators for very deep networks.
+    is_log_model_params_grads: bool = False
+
+    # Whether to log model activations (once per epoch).
+    # These are summarized stats per layer, but it could still lead
+    # to many indicators for very deep networks.
+    is_log_model_activations: bool = False
+
     def init(self):
         """
         ### Initialization
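Both flags default to off, so a run that wants the per-layer logs has to turn them on in its config overrides. The snippet below is a minimal sketch of doing that with the usual labml experiment setup; the module path, experiment name, and the choice of overrides are assumptions for illustration, not part of this commit.

    from labml import experiment
    from labml_nn.experiments.nlp_classification import NLPClassificationConfigs  # assumed module path

    def main():
        # Create an experiment and a configs object
        experiment.create(name="ag_news_classification")  # hypothetical experiment name
        conf = NLPClassificationConfigs()
        # Enable the (off-by-default) per-layer logging added in this commit
        experiment.configs(conf, {
            'is_log_model_params_grads': True,
            'is_log_model_activations': True,
        })
        # Start the experiment and run training/validation
        with experiment.start():
            conf.run()

    if __name__ == '__main__':
        main()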
@@ -102,7 +112,7 @@ class NLPClassificationConfigs(TrainValidConfigs):
             tracker.add_global_step(data.shape[1])
 
         # Whether to capture model outputs
-        with self.mode.update(is_log_activations=batch_idx.is_last):
+        with self.mode.update(is_log_activations=batch_idx.is_last and self.is_log_model_activations):
             # Get model outputs.
             # It's returning a tuple for states when using RNNs.
             # This is not implemented yet. 😜
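Activation capture is toggled per batch through a context manager on the mode state, so activations are recorded only on the last batch of an epoch, and after this change only when the new flag is also on. The following is an illustrative analogue of that toggle pattern, not the labml_helpers implementation; the ModeState class here is hypothetical.

    from contextlib import contextmanager

    class ModeState:
        # Hypothetical stand-in for the mode object used above
        def __init__(self):
            self.is_log_activations = False

        @contextmanager
        def update(self, **flags):
            # Temporarily override the given flags and restore them on exit
            previous = {name: getattr(self, name) for name in flags}
            for name, value in flags.items():
                setattr(self, name, value)
            try:
                yield self
            finally:
                for name, value in previous.items():
                    setattr(self, name, value)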
@@ -125,7 +135,7 @@ class NLPClassificationConfigs(TrainValidConfigs):
             # Take optimizer step
             self.optimizer.step()
             # Log the model parameters and gradients on last batch of every epoch
-            if batch_idx.is_last:
+            if batch_idx.is_last and self.is_log_model_params_grads:
                 tracker.add('model', self.model)
             # Clear the gradients
             self.optimizer.zero_grad()
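The tracker.add('model', self.model) call is what produces the per-layer parameter and gradient summaries mentioned in the new config comments; labml computes those summaries internally. The helper below is only a rough, hypothetical approximation of the kind of indicators involved, with made-up indicator names.

    import torch
    from labml import tracker

    def log_param_grad_stats(model: torch.nn.Module):
        # Rough approximation of per-layer summarized stats; labml's own
        # tracker.add('model', model) does this internally with its own naming.
        for name, param in model.named_parameters():
            tracker.add(f'param.{name}.mean', param.detach().mean())
            tracker.add(f'param.{name}.std', param.detach().std())
            if param.grad is not None:
                tracker.add(f'grad.{name}.l2', param.grad.detach().norm())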