diff --git a/examples/notebooks/TextCNN.ipynb b/examples/notebooks/TextCNN.ipynb
index 05b0d662b43..887114f146e 100644
--- a/examples/notebooks/TextCNN.ipynb
+++ b/examples/notebooks/TextCNN.ipynb
@@ -426,7 +426,7 @@
     "batch_size = 8 # A batch size of 8\n",
     "\n",
     "def create_iterators(batch_size=8):\n",
-    "    \"\"\"Heler function to create the iterators\"\"\"\n",
+    "    \"\"\"Helper function to create the iterators\"\"\"\n",
     "    dataloaders = []\n",
     "    for split in [train_list, validation_list, test_list]:\n",
     "        dataloader = DataLoader(\n",
@@ -695,7 +695,7 @@
     "Similar to the training process function, we set up a function to evaluate a single batch. Here is what the eval_function does:\n",
     "\n",
     "* Sets model in eval mode.\n",
-    "* With torch.no_grad(), no gradients are calculated for any succeding steps.\n",
+    "* With torch.no_grad(), no gradients are calculated for any succeeding steps.\n",
     "* Generates x and y from batch.\n",
     "* Performs a forward pass on the model to calculate y_pred based on model and x.\n",
    "* Returns y_pred and y.\n",
@@ -1002,4 +1002,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 4
-}
\ No newline at end of file
+}
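
For reference, the bullet list patched in the second hunk describes the notebook's eval_function. Below is a minimal sketch of that step, assuming an Ignite-style (engine, batch) signature and batches that unpack into (x, y) tensors; the placeholder model and names are illustrative, not the notebook's exact code.

```python
import torch
import torch.nn as nn

model = nn.Linear(16, 2)  # placeholder model standing in for the notebook's TextCNN

def eval_function(engine, batch):
    model.eval()               # set model in eval mode
    with torch.no_grad():      # no gradients are calculated for any succeeding steps
        x, y = batch           # generate x and y from the batch
        y_pred = model(x)      # forward pass to calculate y_pred from model and x
    return y_pred, y           # return y_pred and y

# Quick check with a dummy batch (engine is unused in this sketch, so pass None).
y_pred, y = eval_function(None, (torch.randn(8, 16), torch.randint(0, 2, (8,))))
```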