From cbe2fc71aa451684e2071672093fd7970b307244 Mon Sep 17 00:00:00 2001 From: Henry Webel Date: Mon, 25 Apr 2022 08:09:06 +0200 Subject: [PATCH] callback begin_* -> before_* (#458) --- 16_accel_sgd.ipynb | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/16_accel_sgd.ipynb b/16_accel_sgd.ipynb index f2bc040..a2650ec 100644 --- a/16_accel_sgd.ipynb +++ b/16_accel_sgd.ipynb @@ -998,7 +998,7 @@ "\n", "```python\n", "try:\n", - " self._split(b); self('begin_batch')\n", + " self._split(b); self('before_batch')\n", " self.pred = self.model(*self.xb); self('after_pred')\n", " self.loss = self.loss_func(self.pred, *self.yb); self('after_loss')\n", " if not self.training: return\n", @@ -1027,17 +1027,17 @@ "source": [ "When you want to write your own callback, the full list of available events is:\n", "\n", - "- `begin_fit`:: called before doing anything; ideal for initial setup.\n", - "- `begin_epoch`:: called at the beginning of each epoch; useful for any behavior you need to reset at each epoch.\n", - "- `begin_train`:: called at the beginning of the training part of an epoch.\n", - "- `begin_batch`:: called at the beginning of each batch, just after drawing said batch. It can be used to do any setup necessary for the batch (like hyperparameter scheduling) or to change the input/target before it goes into the model (for instance, apply Mixup).\n", + "- `before_fit`:: called before doing anything; ideal for initial setup.\n", + "- `before_epoch`:: called at the beginning of each epoch; useful for any behavior you need to reset at each epoch.\n", + "- `before_train`:: called at the beginning of the training part of an epoch.\n", + "- `before_batch`:: called at the beginning of each batch, just after drawing said batch. It can be used to do any setup necessary for the batch (like hyperparameter scheduling) or to change the input/target before it goes into the model (for instance, apply Mixup).\n", "- `after_pred`:: called after computing the output of the model on the batch. It can be used to change that output before it's fed to the loss function.\n", "- `after_loss`:: called after the loss has been computed, but before the backward pass. It can be used to add penalty to the loss (AR or TAR in RNN training, for instance).\n", "- `after_backward`:: called after the backward pass, but before the update of the parameters. It can be used to make changes to the gradients before said update (via gradient clipping, for instance).\n", "- `after_step`:: called after the step and before the gradients are zeroed.\n", "- `after_batch`:: called at the end of a batch, to perform any required cleanup before the next one.\n", "- `after_train`:: called at the end of the training phase of an epoch.\n", - "- `begin_validate`:: called at the beginning of the validation phase of an epoch; useful for any setup needed specifically for validation.\n", + "- `before_validate`:: called at the beginning of the validation phase of an epoch; useful for any setup needed specifically for validation.\n", "- `after_validate`:: called at the end of the validation part of an epoch.\n", "- `after_epoch`:: called at the end of an epoch, for any cleanup before the next one.\n", "- `after_fit`:: called at the end of training, for final cleanup.\n", @@ -1059,8 +1059,8 @@ "outputs": [], "source": [ "class ModelResetter(Callback):\n", - " def begin_train(self): self.model.reset()\n", - " def begin_validate(self): self.model.reset()" + " def before_train(self): self.model.reset()\n", + " def before_validate(self): self.model.reset()" ] }, {