Could anyone help me out? I'm training an LSTM with PyTorch, and no matter how many epochs I run, the network parameters barely change and the loss doesn't change either. I'm using the most basic network possible (it started as an LSTM autoencoder; while hunting for the problem I kept stripping it down until only a single recurrent layer was left, trained to make the output approximate the input, and the problem is still there).
If anyone knows what's going on, please take a look. Many thanks!
import torch
from torch import nn

# Single recurrent layer: 5 input features -> 5 hidden units, batch_first
class Network(nn.Module):
    def __init__(self):
        super().__init__()
        self.rnn = nn.RNN(
            input_size=5,
            hidden_size=5,
            num_layers=1,
            batch_first=True
        )

    def forward(self, x):
        y, h = self.rnn(x)
        return y

model = Network()
Loss_fn = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

# Toy data: 3 sequences, each 2 steps long with 5 features
a = torch.zeros(3, 2, 5)
a[0, :, :] = torch.tensor([[[25.5, 28.7, 14.3, 63.5, 27.0], [15.5, 37.6, 43.3, 22.6, 54.4]]])
a[1, :, :] = a[0, :, :] + 15
a[2, :, :] = a[0, :, :] + 30

# Train the layer to reproduce its own input
for epoch in range(10):
    for n in range(3):
        data_in = a[n:n+1, :, :]
        data_pred = model(data_in)
        loss = Loss_fn(data_pred, data_in)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
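For reference, here is a self-contained version of the same toy setup with some extra printing added (the fixed seed, the per-epoch loss, and the total gradient norm are my own debugging additions, not part of the original network). It should make it easier to see whether the gradients are actually close to zero or whether the parameters are just changing too slowly to notice.

import torch
from torch import nn

torch.manual_seed(0)  # fixed seed so runs are comparable (debugging addition)

class Network(nn.Module):
    def __init__(self):
        super().__init__()
        self.rnn = nn.RNN(input_size=5, hidden_size=5, num_layers=1, batch_first=True)

    def forward(self, x):
        y, h = self.rnn(x)
        return y

model = Network()
loss_fn = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

# Same toy data as above: 3 sequences, each 2 steps long with 5 features
a = torch.zeros(3, 2, 5)
a[0, :, :] = torch.tensor([[25.5, 28.7, 14.3, 63.5, 27.0],
                           [15.5, 37.6, 43.3, 22.6, 54.4]])
a[1, :, :] = a[0, :, :] + 15
a[2, :, :] = a[0, :, :] + 30

for epoch in range(10):
    epoch_loss = 0.0
    grad_total = 0.0
    for n in range(3):
        data_in = a[n:n+1, :, :]
        data_pred = model(data_in)
        loss = loss_fn(data_pred, data_in)
        optimizer.zero_grad()
        loss.backward()
        # Total gradient norm right before the optimizer step
        grad_total += sum(p.grad.norm().item() for p in model.parameters() if p.grad is not None)
        optimizer.step()
        epoch_loss += loss.item()
    print(f"epoch {epoch}: mean loss = {epoch_loss / 3:.2f}, grad norm = {grad_total:.4f}")

One thing I noticed while writing this: nn.RNN uses tanh by default (and an LSTM's hidden state also passes through tanh), so a single recurrent layer can only output values in (-1, 1), while the targets here are in the tens. I'm not sure whether that's the whole story, but it means the MSE is dominated by the raw scale of the data, so it may be worth checking whether normalizing the inputs changes the behaviour.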