-
Notifications
You must be signed in to change notification settings - Fork 1
/
__main__.py
108 lines (73 loc) · 2.51 KB
/
__main__.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
import random
import numpy as np
import keras
from keras.models import Model
from keras.models import load_model
from keras.models import Sequential
from keras.layers import Input, Lambda
from keras.optimizers import Adam, SGD, Nadam, RMSprop
from keras.layers.core import Dense, Activation, Dropout, Flatten
from siamesenet import SiameseNetwork
from utils import contrastive_loss
# Training parameters
batch_size = 128  # samples per gradient update
n_epochs = 999999  # NOTE(review): effectively "train forever" — presumably stopped manually or by logic inside SiameseNetwork.fit; confirm
checkpoint_path = "./checkpoint"  # where siamese_model.restore() reads the trained encoder weights from (see bottom of file)
log_path = "./log"  # log directory; declared but not used in the visible code
validation_split = 0.2  # validation hold-out fraction; declared but not used in the visible code
# Siamese network parameters
n_output = 20 # the size of embeddings (output of the encoder model)
learning_rate = 1e-2  # consumed by the SGD/Nadam optimizer definitions further down
def create_encoder_model(input_shape, embedding_dim=None):
    """Build the encoder network shared by both branches of the Siamese model.

    Two 128-unit fully-connected ReLU layers with light dropout, followed by
    a ReLU embedding layer.

    :param input_shape: number of input features (dimensionality of one sample)
    :type input_shape: int
    :param embedding_dim: size of the embedding produced by the encoder;
        when None (the default, and the original behavior) the module-level
        ``n_output`` value is used
    :type embedding_dim: int or None
    :return: the encoder model structure
    :rtype: keras.models.Sequential
    """
    if embedding_dim is None:
        embedding_dim = n_output  # preserve the original global lookup
    model = Sequential()
    model.add(Dense(128, input_dim=input_shape, name='fc1'))
    model.add(Activation('relu'))
    model.add(Dropout(0.1))
    model.add(Dense(128, name='fc2'))
    model.add(Activation('relu'))
    model.add(Dropout(0.1))
    # Embedding layer. NOTE: the name is kept as 'fc4' (not 'fc3') so that
    # checkpoints saved under the original layer naming still load by name.
    model.add(Dense(embedding_dim, activation='relu', name='fc4'))
    return model
""" Randomly generating dummy training data for the Siamese network
"""
n_examples = 7500
n_features = 25
pair_shape = (n_examples, n_features)
# Two input batches (one per Siamese branch) plus one target per pair,
# all drawn uniformly from [0, 1).
x_train_1 = np.random.random(pair_shape)
x_train_2 = np.random.random(pair_shape)
y_train = np.random.random(n_examples)
# Held-out data to embed with the trained encoder at the end of the script.
x_test = np.random.random(pair_shape)
input_shape = n_features
""" Building the Siamese Network
"""
encoder_model = create_encoder_model(input_shape)
siamese_model = SiameseNetwork(encoder_model)
""" Compile
"""
# Optimizer. Alternatives previously experimented with:
#   RMSprop(), SGD(lr=learning_rate, momentum=0.9, decay=0, nesterov=True),
#   Nadam(lr=learning_rate)
adam = Adam()
siamese_model.compile(loss=contrastive_loss, optimizer=adam, metrics=['mae'])
""" Training
"""
siamese_model.fit(x_train_1, x_train_2, y_train, epochs=n_epochs, batch_size=batch_size)
""" Using the trained encoder
Extracting new feature embeddings from test data
"""
model = siamese_model.restore(encoder_model=encoder_model, checkpoint_path=checkpoint_path)
# Bug fix: Model.summary() prints the summary itself and returns None, so the
# original print(model.summary()) emitted a spurious "None" line.
model.summary()
new_embeddings = model.predict(x_test)