import numpy as np
import scipy.io as scio
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
from torch.autograd import Variable
class simpleLSTM(nn.Module):
    """Sequence classifier: embedding -> LSTM -> linear -> log-softmax.

    Expects integer token indices shaped (batch, seq_len); the LSTM runs with
    batch_first=True and only the final time step is fed to the classifier head.
    """

    def __init__(self, input_size, hidden_size, output_size, num_layers=1):
        """
        Args:
            input_size: vocabulary size for the embedding table.
            hidden_size: embedding dim and LSTM hidden dim (shared).
            output_size: number of output classes.
            num_layers: number of stacked LSTM layers (default 1).
        """
        super(simpleLSTM, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.embedding = nn.Embedding(input_size, hidden_size)
        # Embedding dim feeds straight into the LSTM, hence hidden_size twice.
        self.lstm = nn.LSTM(hidden_size, hidden_size, num_layers, batch_first=True)
        self.fc = nn.Linear(hidden_size, output_size)
        self.softmax = nn.LogSoftmax(dim=1)

    def forward(self, input, hidden):
        """Run one classification pass.

        Args:
            input: LongTensor of token indices, shape (batch, seq_len).
            hidden: (h_0, c_0) tuple, each (num_layers, batch, hidden_size).

        Returns:
            (log_probs, hidden): log_probs has shape (batch, output_size);
            hidden is the LSTM's final (h_n, c_n) state.
        """
        x = self.embedding(input)
        output, hidden = self.lstm(x, hidden)
        # Classify from the last time step only (batch_first layout).
        output = output[:, -1, :]
        output = self.fc(output)
        output = self.softmax(output)
        return output, hidden

    def initHidden(self, batch_size=1):
        """Return a zeroed (h_0, c_0) pair for a batch of the given size.

        Replaces the deprecated ``torch.autograd.Variable`` wrapper (a no-op
        since PyTorch 0.4) with plain tensors; batch_size=1 preserves the
        original behavior while supporting larger batches.
        """
        hidden = torch.zeros(self.num_layers, batch_size, self.hidden_size)
        cell = torch.zeros(self.num_layers, batch_size, self.hidden_size)
        return (hidden, cell)
# Simple LSTM neural network.
# Adapted from blog.csdn.net/weixin_42528089/article/details/103864723