| |
| """ |
| NLP From Scratch: Generating Names with a Character-Level RNN |
| ************************************************************* |
| **Author**: `Sean Robertson <https://github.com/spro/practical-pytorch>`_ |
| |
| This is our second of three tutorials on "NLP From Scratch". |
In the :doc:`first tutorial </intermediate/char_rnn_classification_tutorial>`
we used an RNN to classify names into their language of origin. This time
we'll turn around and generate names from languages.
| |
| :: |
| |
| > python sample.py Russian RUS |
| Rovakov |
| Uantov |
| Shavakov |
| |
| > python sample.py German GER |
| Gerren |
| Ereng |
| Rosher |
| |
| > python sample.py Spanish SPA |
| Salla |
| Parer |
| Allan |
| |
| > python sample.py Chinese CHI |
| Chan |
| Hang |
| Iun |
| |
| We are still hand-crafting a small RNN with a few linear layers. The big |
| difference is instead of predicting a category after reading in all the |
| letters of a name, we input a category and output one letter at a time. |
| Recurrently predicting characters to form language (this could also be |
| done with words or other higher order constructs) is often referred to |
| as a "language model". |
| |
| **Recommended Reading:** |
| |
| I assume you have at least installed PyTorch, know Python, and |
| understand Tensors: |
| |
| - https://pytorch.org/ For installation instructions |
| - :doc:`/beginner/deep_learning_60min_blitz` to get started with PyTorch in general |
| - :doc:`/beginner/pytorch_with_examples` for a wide and deep overview |
- :doc:`/beginner/former_torchies_tutorial` if you are a former Lua Torch user
| |
| It would also be useful to know about RNNs and how they work: |
| |
| - `The Unreasonable Effectiveness of Recurrent Neural |
| Networks <https://karpathy.github.io/2015/05/21/rnn-effectiveness/>`__ |
| shows a bunch of real life examples |
| - `Understanding LSTM |
| Networks <https://colah.github.io/posts/2015-08-Understanding-LSTMs/>`__ |
| is about LSTMs specifically but also informative about RNNs in |
| general |
| |
| I also suggest the previous tutorial, :doc:`/intermediate/char_rnn_classification_tutorial` |
| |
| |
| Preparing the Data |
| ================== |
| |
| .. Note:: |
| Download the data from |
| `here <https://download.pytorch.org/tutorial/data.zip>`_ |
| and extract it to the current directory. |
| |
| See the last tutorial for more detail of this process. In short, there |
| are a bunch of plain text files ``data/names/[Language].txt`` with a |
| name per line. We split lines into an array, convert Unicode to ASCII, |
| and end up with a dictionary ``{language: [names ...]}``. |
| |
| """ |
| from __future__ import unicode_literals, print_function, division |
| from io import open |
| import glob |
| import os |
| import unicodedata |
| import string |
|
|
# Vocabulary: ASCII letters plus the punctuation that survives name normalization.
all_letters = string.ascii_letters + " .,;'-"
# +1 reserves one extra index (the last one, n_letters - 1) for the EOS marker.
n_letters = len(all_letters) + 1
|
|
| def findFiles(path): return glob.glob(path) |
|
|
| |
def unicodeToAscii(s):
    """Strip accents from *s*: NFD-decompose, drop combining marks ('Mn'),
    and keep only characters present in ``all_letters``."""
    decomposed = unicodedata.normalize('NFD', s)
    kept = [ch for ch in decomposed
            if unicodedata.category(ch) != 'Mn' and ch in all_letters]
    return ''.join(kept)
|
|
| |
def readLines(filename):
    """Read *filename* (UTF-8) and return its non-empty lines, each
    normalized to ASCII via ``unicodeToAscii``.

    Fix: the original called ``open(...).read()`` without ever closing the
    file handle — a resource leak; a ``with`` block guarantees closure.
    """
    with open(filename, encoding='utf-8') as f:
        lines = f.read().strip().split('\n')
    return [unicodeToAscii(line) for line in lines]
|
|
| |
# Build the category -> [names...] dictionary from data/names/*.txt,
# one language per file, one name per line.
category_lines = {}
all_categories = []
for filename in findFiles('data/names/*.txt'):
    # Language name is the file's base name without extension, e.g. "Russian".
    category = os.path.splitext(os.path.basename(filename))[0]
    all_categories.append(category)
    lines = readLines(filename)
    category_lines[category] = lines

n_categories = len(all_categories)

# Fail fast with download instructions if the dataset was not extracted here.
if n_categories == 0:
    raise RuntimeError('Data not found. Make sure that you downloaded data '
        'from https://download.pytorch.org/tutorial/data.zip and extract it to '
        'the current directory.')

print('# categories:', n_categories, all_categories)
print(unicodeToAscii("O'Néàl"))
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| import torch |
| import torch.nn as nn |
|
|
class RNN(nn.Module):
    """Character-level generator cell.

    One step maps (category one-hot, letter one-hot, hidden state) to
    (log-probabilities over the next letter, next hidden state).
    Relies on the module-level ``n_categories`` at construction time.
    """

    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.hidden_size = hidden_size

        combined_size = n_categories + input_size + hidden_size
        self.i2h = nn.Linear(combined_size, hidden_size)
        self.i2o = nn.Linear(combined_size, output_size)
        self.o2o = nn.Linear(hidden_size + output_size, output_size)
        self.dropout = nn.Dropout(0.1)
        self.softmax = nn.LogSoftmax(dim=1)

    def forward(self, category, input, hidden):
        # Concatenate all conditioning signals into one row vector.
        combined = torch.cat((category, input, hidden), 1)
        next_hidden = self.i2h(combined)
        raw_output = self.i2o(combined)
        # Second mixing stage: combine new hidden state with the raw output.
        merged = torch.cat((next_hidden, raw_output), 1)
        logits = self.o2o(merged)
        return self.softmax(self.dropout(logits)), next_hidden

    def initHidden(self):
        """Fresh all-zero hidden state for the start of a name."""
        return torch.zeros(1, self.hidden_size)
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| import random |
|
|
| |
def randomChoice(l):
    """Return a uniformly random element of sequence *l*.

    Uses the stdlib ``random.choice`` instead of the hand-rolled
    ``l[random.randint(0, len(l) - 1)]``. NOTE: an empty *l* now raises
    IndexError rather than ValueError.
    """
    return random.choice(l)
|
|
| |
def randomTrainingPair():
    """Pick a random (category, name) pair from the loaded dataset."""
    picked_category = randomChoice(all_categories)
    picked_name = randomChoice(category_lines[picked_category])
    return picked_category, picked_name
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
def categoryTensor(category):
    """One-hot row vector of shape (1, n_categories) marking *category*."""
    one_hot = torch.zeros(1, n_categories)
    one_hot[0][all_categories.index(category)] = 1
    return one_hot
|
|
| |
def inputTensor(line):
    """One-hot tensor of shape (len(line), 1, n_letters) for *line*,
    excluding EOS (the input sequence never contains the EOS marker).

    Fix: the original used ``all_letters.find(letter)``, which returns -1
    for a letter not in the vocabulary and so silently one-hot the LAST
    slot — the EOS index. ``index`` raises ValueError instead, so bad
    input fails loudly.
    """
    tensor = torch.zeros(len(line), 1, n_letters)
    for li, letter in enumerate(line):
        tensor[li][0][all_letters.index(letter)] = 1
    return tensor
|
|
| |
def targetTensor(line):
    """LongTensor of target letter indices for *line*: each position's
    target is the NEXT letter, and the final target is the EOS index
    (n_letters - 1).

    Fix: the original used ``all_letters.find``, which maps an unknown
    letter to -1 (a wrong but silent target); ``index`` raises ValueError.
    """
    letter_indexes = [all_letters.index(line[li]) for li in range(1, len(line))]
    letter_indexes.append(n_letters - 1)  # EOS
    return torch.LongTensor(letter_indexes)
|
|
|
|
| |
| |
| |
| |
| |
|
|
| |
def randomTrainingExample():
    """Draw a random (category, line) pair and convert it to the three
    tensors ``train`` consumes: category, input sequence, target sequence."""
    category, line = randomTrainingPair()
    return categoryTensor(category), inputTensor(line), targetTensor(line)
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
# Negative log-likelihood loss: pairs with the LogSoftmax output of RNN.forward.
criterion = nn.NLLLoss()

# Step size for the manual SGD update in train(); small because updates are per-name.
learning_rate = 0.0005
|
|
def train(category_tensor, input_line_tensor, target_line_tensor):
    """Run one training step on a single (category, name) example.

    Feeds the name one letter at a time, accumulates NLL loss against the
    shifted target sequence, backpropagates once over the whole name, and
    applies a manual SGD update to the global ``rnn``.

    Returns (last output, average loss per letter).
    """
    # NOTE: in-place unsqueeze mutates the caller's tensor; each per-step
    # target must be a 1-element tensor for NLLLoss.
    target_line_tensor.unsqueeze_(-1)
    hidden = rnn.initHidden()

    rnn.zero_grad()

    # Accumulated as a tensor so loss.backward() covers the whole sequence.
    loss = 0

    for i in range(input_line_tensor.size(0)):
        output, hidden = rnn(category_tensor, input_line_tensor[i], hidden)
        l = criterion(output, target_line_tensor[i])
        loss += l

    loss.backward()

    # Manual SGD: p <- p - learning_rate * grad (no optimizer object).
    for p in rnn.parameters():
        p.data.add_(p.grad.data, alpha=-learning_rate)

    return output, loss.item() / input_line_tensor.size(0)
|
|
|
|
| |
| |
| |
| |
|
|
| import time |
| import math |
|
|
| def timeSince(since): |
| now = time.time() |
| s = now - since |
| m = math.floor(s / 60) |
| s -= m * 60 |
| return '%dm %ds' % (m, s) |
|
|
|
|
| |
| |
| |
| |
| |
| |
|
|
# 128 hidden units; input and output vocabularies are both the letter set.
rnn = RNN(n_letters, 128, n_letters)

n_iters = 100000
print_every = 5000
plot_every = 500
all_losses = []
total_loss = 0  # Reset every plot_every iterations

start = time.time()

# NOTE(review): the loop variable shadows the builtin ``iter`` — harmless
# here but worth renaming if this code is reused.
for iter in range(1, n_iters + 1):
    output, loss = train(*randomTrainingExample())
    total_loss += loss

    # Periodic progress report: elapsed time, iteration, percent, last loss.
    if iter % print_every == 0:
        print('%s (%d %d%%) %.4f' % (timeSince(start), iter, iter / n_iters * 100, loss))

    # Record the running average for plotting, then reset the accumulator.
    if iter % plot_every == 0:
        all_losses.append(total_loss / plot_every)
        total_loss = 0
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
|
|
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker  # NOTE(review): unused in this chunk; kept in case later cells use it

# Plot the average loss recorded every plot_every iterations; a downward
# curve indicates the model is learning.
plt.figure()
plt.plot(all_losses)
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| max_length = 20 |
|
|
| |
def sample(category, start_letter='A'):
    """Generate one name for *category*, seeded with *start_letter*.

    The network's most likely letter is fed back in as the next input,
    until the model emits EOS or the name reaches ``max_length`` letters.
    """
    with torch.no_grad():  # sampling only — no gradient bookkeeping
        cat_tensor = categoryTensor(category)
        step_input = inputTensor(start_letter)
        hidden = rnn.initHidden()
        name = start_letter

        for _ in range(max_length):
            output, hidden = rnn(cat_tensor, step_input[0], hidden)
            _, top_i = output.topk(1)
            top_i = top_i[0][0]
            if top_i == n_letters - 1:
                break  # EOS: the model chose to stop
            next_letter = all_letters[top_i]
            name += next_letter
            step_input = inputTensor(next_letter)

        return name
|
|
| |
def samples(category, start_letters='ABC'):
    """Print one generated name per letter in *start_letters*."""
    for seed_letter in start_letters:
        print(sample(category, seed_letter))
|
|
# Generate a few names per language, seeded with typical capital letters.
samples('Russian', 'RUS')

samples('German', 'GER')

samples('Spanish', 'SPA')

samples('Chinese', 'CHI')
|
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|