Deep Learning for Text with PyTorch
Shubham Jain
Instructor


import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

class SentimentAnalysisCNN(nn.Module):
    def __init__(self, vocab_size, embed_dim):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        self.conv = nn.Conv1d(embed_dim, embed_dim, kernel_size=3,
                              stride=1, padding=1)
        self.fc = nn.Linear(embed_dim, 2)
    ...
- The __init__ method configures the architecture
- super() initializes the base class nn.Module
- nn.Embedding creates dense word vectors
- nn.Conv1d applies a convolution over one-dimensional data

    def forward(self, text):
        embedded = self.embedding(text).permute(0, 2, 1)
        conved = F.relu(self.conv(embedded))
        conved = conved.mean(dim=2)
        return self.fc(conved)
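To sanity-check the tensor shapes flowing through forward(), here is a minimal sketch (the batch size of 1, sequence length of 5, and vocab_size=7 are illustrative assumptions, not values from the slides):

dummy_input = torch.zeros(1, 5, dtype=torch.long)  # a batch of one sentence, five token IDs
demo_model = SentimentAnalysisCNN(vocab_size=7, embed_dim=10)

# embedding: (1, 5) -> (1, 5, 10); permute -> (1, 10, 5)
# conv + ReLU with padding=1 preserves the length: (1, 10, 5)
# mean over dim=2 pools across the sequence: (1, 10)
# fc maps the pooled vector to two class logits: (1, 2)
print(demo_model(dummy_input).shape)  # torch.Size([1, 2])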
vocab = ["i", "love", "this", "book", "do", "not", "like"] word_to_idx = {word: i for i, word in enumerate(vocab)}vocab_size = len(word_to_ix)embed_dim = 10book_samples = [ ("The story was captivating and kept me hooked until the end.".split(),1), ("I found the characters shallow and the plot predictable.".split(),0) ]model = SentimentAnalysisCNN(vocab_size, embed_dim) criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(model.parameters(), lr=0.1)
for epoch in range(10):
    for sentence, label in book_samples:
        model.zero_grad()
        # map words to indices; words outside the vocab fall back to index 0
        sentence = torch.LongTensor([word_to_idx.get(w, 0) for w in sentence]).unsqueeze(0)
        outputs = model(sentence)
        label = torch.LongTensor([int(label)])
        loss = criterion(outputs, label)
        loss.backward()
        optimizer.step()
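To confirm the loop is actually converging, one variant accumulates the scalar loss and reports a per-epoch average (a sketch; running_loss is added bookkeeping, not part of the original loop):

for epoch in range(10):
    running_loss = 0.0
    for sentence, label in book_samples:
        model.zero_grad()
        inputs = torch.LongTensor([word_to_idx.get(w, 0) for w in sentence]).unsqueeze(0)
        outputs = model(inputs)
        target = torch.LongTensor([int(label)])
        loss = criterion(outputs, target)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()  # .item() extracts the Python float
    print(f"Epoch {epoch + 1}, average loss: {running_loss / len(book_samples):.4f}")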
for sample, _ in book_samples:  # each sample is a (token list, label) pair; the label is unused here
    input_tensor = torch.tensor([word_to_idx.get(w, 0) for w in sample],
                                dtype=torch.long).unsqueeze(0)
    outputs = model(input_tensor)
    _, predicted_label = torch.max(outputs, dim=1)
    sentiment = "Positive" if predicted_label.item() == 1 else "Negative"
    print(f"Book Review: {' '.join(sample)}")
    print(f"Sentiment: {sentiment}\n")
Book Review: The story was captivating and kept me hooked until the end
Sentiment: Positive
Book Review: I found the characters shallow and the plot predictable
Sentiment: Negative
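torch.max only reports the most likely class; to see how confident the model is, one can turn the logits into probabilities with a softmax (a sketch reusing the F alias imported above):

probabilities = F.softmax(outputs, dim=1)  # rows sum to 1 across the two classes
print(f"Confidence: {probabilities.max().item():.2f}")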