# discri.py
import torch
import torch.nn as nn

class DGWGAN(nn.Module):
    """WGAN critic: strided convolutions with InstanceNorm and LeakyReLU,
    no sigmoid on the output score."""

    def __init__(self, in_dim=3, dim=64):
        super(DGWGAN, self).__init__()

        def conv_ln_lrelu(in_dim, out_dim):
            # Strided 5x5 convolution that halves the spatial resolution.
            # Since there is no effective implementation of LayerNorm,
            # we use InstanceNorm2d instead of LayerNorm here.
            return nn.Sequential(
                nn.Conv2d(in_dim, out_dim, 5, 2, 2),
                nn.InstanceNorm2d(out_dim, affine=True),
                nn.LeakyReLU(0.2))

        # First layer: strided conv without normalization.
        self.layer1 = nn.Sequential(nn.Conv2d(in_dim, dim, 5, 2, 2),
                                    nn.LeakyReLU(0.2))
        self.layer2 = conv_ln_lrelu(dim, dim * 2)
        self.layer3 = conv_ln_lrelu(dim * 2, dim * 4)
        self.layer4 = conv_ln_lrelu(dim * 4, dim * 8)
        # Final 4x4 convolution maps the dim*8 feature map to a single score.
        self.layer5 = nn.Conv2d(dim * 8, 1, 4)

    def forward(self, x):
        feat1 = self.layer1(x)
        feat2 = self.layer2(feat1)
        feat3 = self.layer3(feat2)
        feat4 = self.layer4(feat3)
        y = self.layer5(feat4)
        y = y.view(-1)
        # Return the intermediate feature maps along with the critic score.
        return [feat1, feat2, feat3, feat4], y
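
# Shape sketch (an assumption, not stated in the source): with the default
# in_dim=3, dim=64 and a 3x64x64 input, each strided conv halves the
# resolution, so feat1 is 64x32x32, feat2 128x16x16, feat3 256x8x8,
# feat4 512x4x4, and the final 4x4 conv reduces feat4 to one score per image.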

class DLWGAN(nn.Module):
    """Shallower WGAN critic built from the same blocks as DGWGAN,
    with one fewer downsampling stage."""

    def __init__(self, in_dim=3, dim=64):
        super(DLWGAN, self).__init__()

        def conv_ln_lrelu(in_dim, out_dim):
            # Strided 5x5 convolution that halves the spatial resolution.
            # Since there is no effective implementation of LayerNorm,
            # we use InstanceNorm2d instead of LayerNorm here.
            return nn.Sequential(
                nn.Conv2d(in_dim, out_dim, 5, 2, 2),
                nn.InstanceNorm2d(out_dim, affine=True),
                nn.LeakyReLU(0.2))

        # First layer: strided conv without normalization.
        self.layer1 = nn.Sequential(nn.Conv2d(in_dim, dim, 5, 2, 2),
                                    nn.LeakyReLU(0.2))
        self.layer2 = conv_ln_lrelu(dim, dim * 2)
        self.layer3 = conv_ln_lrelu(dim * 2, dim * 4)
        # Final 4x4 convolution maps the dim*4 feature map to a single score.
        self.layer4 = nn.Conv2d(dim * 4, 1, 4)

    def forward(self, x):
        feat1 = self.layer1(x)
        feat2 = self.layer2(feat1)
        feat3 = self.layer3(feat2)
        y = self.layer4(feat3)
        # Return the intermediate feature maps along with the critic score.
        return [feat1, feat2, feat3], y
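
# A minimal smoke test, assuming 64x64 inputs for DGWGAN and 32x32 inputs for
# DLWGAN; these sizes are inferred from the final 4x4 convolutions and are
# not stated in the source.
if __name__ == "__main__":
    d_global = DGWGAN()
    feats_g, score_g = d_global(torch.randn(2, 3, 64, 64))
    print([tuple(f.shape) for f in feats_g])  # (2,64,32,32) ... (2,512,4,4)
    print(score_g.shape)                      # torch.Size([2])

    d_local = DLWGAN()
    feats_l, score_l = d_local(torch.randn(2, 3, 32, 32))
    print([tuple(f.shape) for f in feats_l])  # (2,64,16,16) ... (2,256,4,4)
    print(score_l.shape)  # torch.Size([2, 1, 1, 1]); DLWGAN does not flatten y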