-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathdiscriminator.py
More file actions
66 lines (52 loc) · 2.33 KB
/
discriminator.py
File metadata and controls
66 lines (52 loc) · 2.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
import numpy as np
import os
import PIL
import tensorflow as tf
from tensorflow.keras import layers, Sequential
from tensorflow.keras.layers import Flatten
import time
class Discriminator:
    """DCGAN-style convolutional discriminator plus its training loss.

    The network outputs a single raw logit per image (no final sigmoid),
    because ``loss_function`` uses the numerically stable
    ``tf.nn.sigmoid_cross_entropy_with_logits``, which applies the sigmoid
    internally.
    """

    def __init__(self, input_shape):
        # (H, W, C) shape of the input images — TODO confirm channels-last,
        # as implied by BatchNormalization(axis=-1) below.
        self.input_shape = input_shape

    def build_discriminator(self):
        """Build and return an uncompiled Keras model: image -> real/fake logit.

        Architecture: a stem conv, then seven 3x3 conv blocks alternating
        stride 1 / stride 2 while the channel count grows 64 -> 512, each
        followed by BatchNorm + LeakyReLU; finally a 1024-unit dense head
        and a single-unit linear output.
        """
        model = Sequential()
        # Stem: by common GAN practice the first conv has no BatchNorm.
        model.add(layers.Conv2D(64, (3, 3), strides=(1, 1), padding='same',
                                input_shape=self.input_shape))
        model.add(layers.LeakyReLU())
        # (filters, stride) schedule for blocks 1-7; stride-2 layers halve
        # the spatial resolution in place of pooling.
        for filters, stride in ((64, 2), (128, 1), (128, 2),
                                (256, 1), (256, 2), (512, 1), (512, 2)):
            model.add(layers.Conv2D(filters, (3, 3),
                                    strides=(stride, stride), padding='same'))
            model.add(layers.BatchNormalization(axis=-1, momentum=0.99,
                                                epsilon=0.001))
            model.add(layers.LeakyReLU())
        # Classifier head.
        model.add(Flatten())
        model.add(layers.Dense(1024))
        model.add(layers.LeakyReLU())
        # BUG FIX: the original used Dense(1, activation='sigmoid'), but
        # loss_function passes the output to
        # sigmoid_cross_entropy_with_logits, which applies sigmoid again.
        # The double sigmoid flattens gradients and breaks training;
        # emit a raw logit instead.
        model.add(layers.Dense(1))
        return model

    def loss_function(self, logits_fake: tf.Tensor, logits_real: tf.Tensor) -> tf.Tensor:
        """Standard GAN discriminator loss on raw logits.

        Labels: fake samples -> 0, real samples -> 1. Both inputs must be
        unactivated logits (see build_discriminator); returns the scalar
        sum of the two mean cross-entropy terms.
        """
        D_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.zeros_like(logits_fake), logits=logits_fake))
        D_loss += tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.ones_like(logits_real), logits=logits_real))
        return D_loss