import torch
import torch.nn as nn
import torch.nn.functional as F

class ConvUpsampling(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0):
        super(ConvUpsampling, self).__init__()
        self.scale_factor = kernel_size
        self.conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU()
        )

    def forward(self, x):
        # Assumed forward pass: upsample by the stored scale factor, then convolve
        x = F.interpolate(x, scale_factor=self.scale_factor, mode='bilinear', align_corners=False)
        return self.conv(x)
# Pretrained VGG used as a fixed feature extractor (assuming torchvision's vgg16)
from torchvision.models import vgg16 as VGG

vgg = VGG(pretrained=True)
vgg.eval()

def get_output():
    def hook(model, input, output):
        model.output = output
    return hook

# Register the hook on the chosen feature layers (assumed completion of the truncated loop)
layer = [2, 5, 9]
for i in layer:
    vgg.features[i].register_forward_hook(get_output())
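With the hooks in place, the stored activations can be compared between an input image and its reconstruction to form a feature (perceptual) loss. The sketch below is illustrative rather than part of the original code: the feature_loss name and the use of F.mse_loss are assumptions, and it relies on the vgg, layer, and hook setup above.

def feature_loss(img, recon):
    # Run the target image through VGG without tracking gradients; hooks store activations in .output
    with torch.no_grad():
        vgg(img)
        target_feats = [vgg.features[i].output for i in layer]
    # Run the reconstruction with gradients enabled so the loss can backpropagate
    vgg(recon)
    recon_feats = [vgg.features[i].output for i in layer]
    return sum(F.mse_loss(r, t) for r, t in zip(recon_feats, target_feats))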
# KL divergence between the approximate posterior N(mu, sigma^2) and the standard normal prior
def kld_loss(mu, logvar):
    return -0.5 * torch.mean(1 + logvar - mu.pow(2) - logvar.exp())

# Total VAE loss: L1 reconstruction term plus the KL term (l1_loss is assumed to be nn.L1Loss())
def total_loss(img, recon, mu, logvar):
    return l1_loss(recon, img) + kld_loss(mu, logvar)
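A minimal training-step sketch showing how total_loss would be used; the vae model returning (recon, mu, logvar) and the optimizer are assumptions, not part of the original code.

recon, mu, logvar = vae(img)               # hypothetical VAE returning reconstruction and latent stats
loss = total_loss(img, recon, mu, logvar)  # L1 reconstruction + KL divergence
optimizer.zero_grad()
loss.backward()
optimizer.step()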
class Conv(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0):
        super(Conv, self).__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU()
        )

    def forward(self, x):
        # Assumed forward pass: apply the conv -> batchnorm -> LeakyReLU stack
        return self.conv(x)
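As a quick, illustrative sanity check (not from the original code), a Conv block with stride 2 halves the spatial resolution while changing the channel count:

block = Conv(3, 32, kernel_size=3, stride=2, padding=1)
x = torch.randn(1, 3, 64, 64)
print(block(x).shape)  # torch.Size([1, 32, 32, 32])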