Skip to content

Instantly share code, notes, and snippets.

@simopal6
Created February 1, 2018 09:04
Show Gist options
  • Save simopal6/96e1e9058d8ddbe4d44e8ad39a92c976 to your computer and use it in GitHub Desktop.
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
class MyLoss(torch.nn.Module):
    """Toy loss module that reduces the network output to its scalar sum.

    The ``target`` argument is accepted only so the module matches the usual
    ``criterion(output, target)`` calling convention; it is ignored.
    """

    def __init__(self):
        super().__init__()

    def forward(self, output, target):
        # target is deliberately unused — the "loss" is just the sum of
        # every element of the output tensor.
        return output.sum()
# Minimal demo: one forward/backward pass through a linear layer using the
# custom MyLoss, then print the resulting weight gradient.
net = nn.Linear(5, 5)
optimizer = optim.SGD(net.parameters(), lr=0.01)
criterion = MyLoss()

optimizer.zero_grad()
# NOTE: the original wrapped this in torch.autograd.Variable, which has been
# deprecated since PyTorch 0.4 — plain tensors carry autograd state directly.
# Also renamed from `input` to avoid shadowing the builtin.
x = torch.randn(1, 5)
pred = net(x)
loss = criterion(pred, None)  # MyLoss ignores the target argument
loss.backward()
print(net.weight.grad)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment