import torch
import torch.nn as nn


class Net(nn.Module):
    """LSTM-based text classifier: embedding -> LSTM encoder -> linear decoder."""

    def __init__(self, vocab_size, embedded_size, num_hiddens, num_layers, num_classes, **kwargs):
        super(Net, self).__init__()
        self.vocab_size = vocab_size
        self.embedded_size = embedded_size
        self.num_hiddens = num_hiddens
        self.num_layers = num_layers
        self.num_classes = num_classes
        # Map token indices to dense vectors of size embedded_size
        self.embedding = nn.Embedding(self.vocab_size, embedded_size)
        # LSTM encoder; expects input of shape (seq_len, batch, embedded_size)
        self.encoder = nn.LSTM(input_size=embedded_size, hidden_size=self.num_hiddens,
                               num_layers=self.num_layers)
        # The decoder consumes the concatenated outputs of the first and last
        # time steps (num_hiddens features each), hence num_hiddens * 2 inputs
        self.decoder = nn.Linear(num_hiddens * 2, self.num_classes)

    def forward(self, x):
        # x: (batch, seq_len) of token indices
        embeddings = self.embedding(x)
        # Permute to (seq_len, batch, embedded_size) before feeding the LSTM
        states, hidden = self.encoder(embeddings.permute(1, 0, 2))
        # Concatenate the LSTM outputs at the first and last time steps
        encoding = torch.cat([states[0], states[-1]], dim=1)
        outs = self.decoder(encoding)
        return outs
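
A minimal usage sketch follows; the hyperparameter values (vocabulary size 10000, 2 output classes, batch of 4 sequences of length 50) are illustrative assumptions, not taken from the snippet above.

# Usage sketch: all sizes below are assumed for illustration only.
model = Net(vocab_size=10000, embedded_size=100, num_hiddens=128,
            num_layers=2, num_classes=2)

# A batch of 4 padded sequences, each 50 token indices long: shape (batch, seq_len)
tokens = torch.randint(0, 10000, (4, 50))

logits = model(tokens)
print(logits.shape)  # torch.Size([4, 2])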