PyTorch RNN Classification


Data loading (simple)

from __future__ import unicode_literals, print_function, division
from io import open
import glob
import os
import torch
def findFiles(path): return glob.glob(path)

#print(findFiles('data/names/*.txt'))

import unicodedata
import string

all_letters = string.ascii_letters + " .,;'"
n_letters = len(all_letters)
print(all_letters,n_letters)
# Turn a Unicode string to plain ASCII, thanks to https://stackoverflow.com/a/518232/2809427
def unicodeToAscii(s):
    return ''.join(
        c for c in unicodedata.normalize('NFD', s)
        if unicodedata.category(c) != 'Mn'
        and c in all_letters
    )

print(unicodeToAscii('Ślusàrski'))
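# expected output: "Slusarski" (combining accent marks, Unicode category 'Mn', are stripped by the NFD normalization above)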

Data preprocessing (turning names into character tensors)

# Build the category_lines dictionary, a list of names per language
category_lines = {}
all_categories = []

# Read a file and split into lines
def readLines(filename):
    lines = open(filename, encoding='utf-8').read().strip().split('\n')
    #print([unicodeToAscii(line) for line in lines])
    return [unicodeToAscii(line) for line in lines]

for filename in findFiles('data/names/*.txt'):
    category = os.path.splitext(os.path.basename(filename))[0]  # file name (minus extension) is the language
    all_categories.append(category)
    lines = readLines(filename)        # list of names read from that file
    category_lines[category] = lines   # key: language, value: list of names

n_categories = len(all_categories)
print(all_categories)

# Find letter index from all_letters, e.g. "a" = 0
def letterToIndex(letter):
    return all_letters.find(letter)

# Just for demonstration, turn a letter into a <1 x n_letters> one-hot Tensor
def letterToTensor(letter):
    tensor = torch.zeros(1, n_letters)
    tensor[0][letterToIndex(letter)] = 1  # the letter's index gets 1, every other position stays 0
    return tensor

# Turn a line into a <line_length x 1 x n_letters> tensor,
# i.e. an array of one-hot letter vectors
def lineToTensor(line):
    tensor = torch.zeros(len(line), 1, n_letters)
    for li, letter in enumerate(line):  # one (1 x n_letters) one-hot row per character
        tensor[li][0][letterToIndex(letter)] = 1
    return tensor

print(letterToTensor('J'))
print(lineToTensor('Jones').size())   # torch.Size([5, 1, 57])
def categoryFromOutput(output):
    top_n, top_i = output.topk(1)  # top score and its index among the n_categories (18) outputs
    category_i = top_i[0].item()   # integer index into all_categories
    return all_categories[category_i], category_i

# `output` is produced by a forward pass of the RNN built in the next section;
# see the single-step check after the network is constructed.
#print(categoryFromOutput(output))
import random

def randomChoice(l):
    temp=l[random.randint(0, len(l) - 1)]
    print(temp)
    return temp

def randomTrainingExample():
    category = randomChoice(all_categories)  # pick a random language
    line = randomChoice(category_lines[category])  # category_lines: key = language, value = list of names; pick a random name
    category_tensor = torch.tensor([all_categories.index(category)], dtype=torch.long)  # target = index of the category
    line_tensor = lineToTensor(line)  # one-hot tensor of the name
    return category, line, category_tensor, line_tensor

for i in range(10):
    category, line, category_tensor, line_tensor = randomTrainingExample()
    print('category =', category, '/ line(random_name) =', line)
Sample output (random names):
category = Korean / line(random_name) = Cho
category = Portuguese / line(random_name) = Castro
category = Polish / line(random_name) = Niemczyk
category = Russian / line(random_name) = Yaminsky
category = Spanish / line(random_name) = Marti
category = Portuguese / line(random_name) = Esteves
category = Vietnamese / line(random_name) = Vinh
category = French / line(random_name) = Laurent
category = Italian / line(random_name) = Napoletani
category = Spanish / line(random_name) = Gallego

Building the network

import torch.nn as nn

class RNN(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super(RNN, self).__init__()

        self.hidden_size = hidden_size

        self.i2h = nn.Linear(input_size + hidden_size, hidden_size)
        self.i2o = nn.Linear(input_size + hidden_size, output_size)
        self.softmax = nn.LogSoftmax(dim=1)

    def forward(self, input, hidden):
        combined = torch.cat((input, hidden), 1)  # RNN step: concatenate current input with previous hidden state
        hidden = self.i2h(combined)
        output = self.i2o(combined)
        output = self.softmax(output)
        return output, hidden

    def initHidden(self):
        return torch.zeros(1, self.hidden_size)

n_hidden = 128
rnn = RNN(n_letters, n_hidden, n_categories)
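
# Quick sanity check (following the PyTorch name-classification tutorial this post is
# based on; not shown in the original post): run a single character through the
# untrained network and decode the result with categoryFromOutput.
input = letterToTensor('A')
hidden = torch.zeros(1, n_hidden)
output, next_hidden = rnn(input, hidden)
print(output)                      # <1 x n_categories> log-probabilities
print(categoryFromOutput(output))  # whichever category the untrained network happens to score highest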

Training the network

learning_rate = 0.005 # If you set this too high, it might explode. If too low, it might not learn
criterion = nn.NLLLoss()  # negative log-likelihood loss; pairs with the LogSoftmax output of the network

def train(category_tensor, line_tensor):
    hidden = rnn.initHidden()
    rnn.zero_grad()

    for i in range(line_tensor.size()[0]):
        output, hidden = rnn(line_tensor[i], hidden)  # feed one character at a time; the hidden state is carried through the name

    loss = criterion(output, category_tensor)
    loss.backward()

    # Add parameters' gradients to their values, multiplied by learning rate
    for p in rnn.parameters():
        p.data.add_(p.grad.data, alpha=-learning_rate)

    return output, loss.item()

########################
n_iters = 100000
print_every = 5000
plot_every = 1000   # how often to record an averaged loss for plotting

# Keep track of losses for plotting
current_loss = 0
all_losses = []
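
# The training loop below reports elapsed time via timeSince(start), which this
# post never defines; this is the timing helper from the PyTorch tutorial it follows.
import time
import math

def timeSince(since):
    now = time.time()
    s = now - since
    m = math.floor(s / 60)
    s -= m * 60
    return '%dm %ds' % (m, s)

start = time.time()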

for iter in range(1, n_iters + 1):
    category, line, category_tensor, line_tensor = randomTrainingExample()  # draw a random (language, name) training pair
    output, loss = train(category_tensor, line_tensor)  # one training step on that name
    current_loss += loss  # accumulate loss for averaging

    # Every print_every iterations, compare the network's guess (via categoryFromOutput)
    # with the true category of the current example
    if iter % print_every == 0:
        guess, guess_i = categoryFromOutput(output)
        correct = '✓' if guess == category else '✗ (%s)' % category
        print('%d %d%% (%s) %.4f %s / %s %s' % (iter, iter / n_iters * 100, timeSince(start), loss, line, guess, correct))

    # Record the average loss every plot_every iterations
    if iter % plot_every == 0:
        all_losses.append(current_loss / plot_every)
        current_loss = 0
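
With the averaged losses collected in all_losses, the loss curve can be plotted. This is a minimal sketch, assuming matplotlib is installed; it is not part of the original post.

import matplotlib.pyplot as plt

plt.figure()
plt.plot(all_losses)               # one point per plot_every iterations
plt.xlabel('iteration / plot_every')
plt.ylabel('average loss')
plt.show()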

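To classify a single name with the trained network, run the whole name through the RNN without tracking gradients and decode the result with categoryFromOutput. This is a minimal sketch modeled on the evaluate helper in the PyTorch tutorial; the name 'Dovesky' is only an illustrative input.

def evaluate(line_tensor):
    # Forward pass over one whole name, no gradient tracking
    hidden = rnn.initHidden()
    with torch.no_grad():
        for i in range(line_tensor.size()[0]):
            output, hidden = rnn(line_tensor[i], hidden)
    return output

print(categoryFromOutput(evaluate(lineToTensor('Dovesky'))))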