from kerastuner import HyperModel
from tensorflow import keras
from tensorflow.keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D


class CNNHyperModel(HyperModel):
    """CNN HyperModel for Keras Tuner: exposes dropout rates, a convolutional
    filter count, dense-layer width and activation, and the learning rate as
    tunable hyperparameters."""

    def __init__(self, input_shape, num_classes):
        super().__init__()
        self.input_shape = input_shape
        self.num_classes = num_classes
    def build(self, hp):
        model = keras.Sequential()

        # Convolutional block 1: two fixed 16-filter layers and max pooling,
        # followed by a tunable dropout rate.
        model.add(
            Conv2D(
                filters=16,
                kernel_size=3,
                activation="relu",
                input_shape=self.input_shape,
            )
        )
        model.add(Conv2D(filters=16, activation="relu", kernel_size=3))
        model.add(MaxPooling2D(pool_size=2))
        model.add(
            Dropout(
                rate=hp.Float(
                    "dropout_1", min_value=0.0, max_value=0.5, default=0.25, step=0.05
                )
            )
        )
        # Convolutional block 2: the second layer's filter count is tunable
        # (32 or 64), followed by max pooling and another tunable dropout.
        model.add(Conv2D(filters=32, kernel_size=3, activation="relu"))
        model.add(
            Conv2D(
                filters=hp.Choice("num_filters", values=[32, 64], default=64),
                activation="relu",
                kernel_size=3,
            )
        )
        model.add(MaxPooling2D(pool_size=2))
        model.add(
            Dropout(
                rate=hp.Float(
                    "dropout_2", min_value=0.0, max_value=0.5, default=0.25, step=0.05
                )
            )
        )
        # Classifier head: the hidden dense layer has a tunable width (32-512
        # units, step 32) and a tunable activation, plus a final dropout.
        model.add(Flatten())
        model.add(
            Dense(
                units=hp.Int(
                    "units", min_value=32, max_value=512, step=32, default=128
                ),
                activation=hp.Choice(
                    "dense_activation",
                    values=["relu", "tanh", "sigmoid"],
                    default="relu",
                ),
            )
        )
        model.add(
            Dropout(
                rate=hp.Float(
                    "dropout_3", min_value=0.0, max_value=0.5, default=0.25, step=0.05
                )
            )
        )
        model.add(Dense(self.num_classes, activation="softmax"))

        # The learning rate is sampled on a log scale between 1e-4 and 1e-2.
        model.compile(
            optimizer=keras.optimizers.Adam(
                hp.Float(
                    "learning_rate",
                    min_value=1e-4,
                    max_value=1e-2,
                    sampling="LOG",
                    default=1e-3,
                )
            ),
            loss="sparse_categorical_crossentropy",
            metrics=["accuracy"],
        )
        return model
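

# Example usage: a minimal sketch of driving this hypermodel with Keras
# Tuner's RandomSearch. The MNIST dataset, INPUT_SHAPE, NUM_CLASSES, and the
# tuner settings below are illustrative assumptions, not part of the original
# tutorial file.
if __name__ == "__main__":
    from kerastuner.tuners import RandomSearch

    INPUT_SHAPE = (28, 28, 1)  # assumed: grayscale 28x28 images (MNIST)
    NUM_CLASSES = 10

    hypermodel = CNNHyperModel(input_shape=INPUT_SHAPE, num_classes=NUM_CLASSES)

    # RandomSearch samples hyperparameter combinations at random; each trial
    # calls hypermodel.build(hp) and trains the resulting model.
    tuner = RandomSearch(
        hypermodel,
        objective="val_accuracy",
        max_trials=10,
        directory="random_search",
        project_name="mnist_cnn",
    )

    (x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
    x_train = x_train.reshape(-1, 28, 28, 1).astype("float32") / 255.0
    x_test = x_test.reshape(-1, 28, 28, 1).astype("float32") / 255.0

    # search() forwards its arguments to model.fit() for every trial.
    tuner.search(x_train, y_train, epochs=5, validation_split=0.1)

    best_model = tuner.get_best_models(num_models=1)[0]
    best_model.evaluate(x_test, y_test)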