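# AlexNet-style network on the Oxford Flowers 17 dataset (TFLearn).
# The snippet below starts after the first convolution; a minimal preamble,
# assumed to follow the stock TFLearn alexnet example (imports, data loading,
# input layer and first conv layer), would look like this:
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.normalization import local_response_normalization
from tflearn.layers.estimator import regression
import tflearn.datasets.oxflower17 as oxflower17

# Load the 17-category flower images, resized to the 227x227 AlexNet input size
X, Y = oxflower17.load_data(one_hot=True, resize_pics=(227, 227))

network = input_data(shape=[None, 227, 227, 3])
network = conv_2d(network, 96, 11, strides=4, activation='relu')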
network = max_pool_2d(network, 3, strides=2)
network = local_response_normalization(network)
network = conv_2d(network, 256, 5, activation='relu')
network = max_pool_2d(network, 3, strides=2)
network = local_response_normalization(network)
network = conv_2d(network, 384, 3, activation='relu')
network = conv_2d(network, 384, 3, activation='relu')
network = conv_2d(network, 256, 3, activation='relu')
network = max_pool_2d(network, 3, strides=2)
network = local_response_normalization(network)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 17, activation='softmax')
network = regression(network, optimizer='momentum',
                     loss='categorical_crossentropy',
                     learning_rate=0.001)
# Training
model = tflearn.DNN(network, checkpoint_path='model_alexnet',
                    max_checkpoints=1, tensorboard_verbose=2)
model.fit(X, Y, n_epoch=1000, validation_set=0.1, shuffle=True,
          show_metric=True, batch_size=64, snapshot_step=200,
          snapshot_epoch=False, run_id='alexnet_oxflowers17')
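
# CIFAR-10 convolutional network (TFLearn). The snippet below resumes at the
# data-augmentation setup; the preceding data loading and preprocessing,
# assumed to follow the stock TFLearn convnet_cifar10 example, are sketched
# here (layer imports as above):
from tflearn.data_utils import shuffle, to_categorical
from tflearn.data_preprocessing import ImagePreprocessing
from tflearn.data_augmentation import ImageAugmentation
from tflearn.datasets import cifar10

# Load and shuffle CIFAR-10, one-hot encode the 10 class labels
(X, Y), (X_test, Y_test) = cifar10.load_data()
X, Y = shuffle(X, Y)
Y = to_categorical(Y, 10)
Y_test = to_categorical(Y_test, 10)

# Real-time data preprocessing: zero-center and scale each feature
img_prep = ImagePreprocessing()
img_prep.add_featurewise_zero_center()
img_prep.add_featurewise_stdnorm()

# Real-time data augmentation (continued below)
img_aug = ImageAugmentation()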
img_aug.add_random_flip_leftright()
img_aug.add_random_rotation(max_angle=25.)
# Convolutional network building
network = input_data(shape=[None, 32, 32, 3],
                     data_preprocessing=img_prep,
                     data_augmentation=img_aug)
network = conv_2d(network, 32, 3, activation='relu')
network = max_pool_2d(network, 2)
network = conv_2d(network, 64, 3, activation='relu')
network = conv_2d(network, 64, 3, activation='relu')
network = max_pool_2d(network, 2)
network = fully_connected(network, 512, activation='relu')
network = dropout(network, 0.5)
network = fully_connected(network, 10, activation='softmax')
network = regression(network, optimizer='adam',
                     loss='categorical_crossentropy',
                     learning_rate=0.001)
# Train using classifier
model = tflearn.DNN(network, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=50, shuffle=True, validation_set=(X_test, Y_test),
          show_metric=True, batch_size=96, run_id='cifar10_cnn')
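
# VGG-style network on Oxford Flowers 17 (TFLearn). Only the final 512-filter
# conv blocks and the classifier head are shown below; the earlier conv blocks
# (64/128/256 filters) and the data loading are assumed to follow the stock
# TFLearn vgg_network example, e.g.:
#     X, Y = oxflower17.load_data(one_hot=True)
#     network = input_data(shape=[None, 224, 224, 3])
#     ... conv_2d / max_pool_2d blocks with 64, 128 and 256 filters ...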
network = conv_2d(network, 512, 3, activation='relu')
network = conv_2d(network, 512, 3, activation='relu')
network = max_pool_2d(network, 2, strides=2)
network = conv_2d(network, 512, 3, activation='relu')
network = conv_2d(network, 512, 3, activation='relu')
network = conv_2d(network, 512, 3, activation='relu')
network = max_pool_2d(network, 2, strides=2)
network = fully_connected(network, 4096, activation='relu')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='relu')
network = dropout(network, 0.5)
network = fully_connected(network, 17, activation='softmax')
network = regression(network, optimizer='rmsprop',
                     loss='categorical_crossentropy',
                     learning_rate=0.0001)
# Training
model = tflearn.DNN(network, checkpoint_path='model_vgg',
                    max_checkpoints=1, tensorboard_verbose=0)
model.fit(X, Y, n_epoch=500, shuffle=True,
          show_metric=True, batch_size=32, snapshot_step=500,
          snapshot_epoch=False, run_id='vgg_oxflowers17')
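
# MNIST convolutional network (TFLearn). X, Y, test_x and test_y are assumed
# to come from the bundled MNIST loader:
import tflearn.datasets.mnist as mnist

X, Y, test_x, test_y = mnist.load_data(one_hot=True)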
X = X.reshape(-1, 28, 28, 1)
test_x = test_x.reshape([-1, 28, 28, 1])
convnet = input_data(shape=[None, 28, 28, 1], name='input')
convnet = conv_2d(convnet, 32, 2, activation='relu')
convnet = max_pool_2d(convnet, 2)
convnet = conv_2d(convnet, 64, 2, activation='relu')
convnet = max_pool_2d(convnet, 2)
convnet = fully_connected(convnet, 1024, activation='relu')
convnet = dropout(convnet, 0.8)
convnet = fully_connected(convnet, 10, activation='softmax')
convnet = regression(convnet, optimizer='adam', learning_rate=0.01, loss='categorical_crossentropy', name='targets')
model = tflearn.DNN(convnet)
model.fit({'input': X}, {'targets': Y}, n_epoch=10,
          validation_set=({'input': test_x}, {'targets': test_y}),
          snapshot_step=500, show_metric=True, run_id='mnist')
model.save('/output/tflearncnn.model')
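
# Drug-target model (TFLearn): pre-trained GRU and embedding weights are
# matched to graph variables by name and copied in with model.set_weights()
# further down. The elif chain below is the tail of a name-matching loop over
# the graph's variables; an assumed shape for that loop (not shown) is:
#     for v in tf.trainable_variables():
#         if ...:        # earlier branches collect the protein-side variables
#             ...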
elif "GRU_3/GRU_3/GRUCell/Candidate/Linear/Matrix" in v.name :
drug_gru_2_candidate_matrix.append(v)
elif "GRU_3/GRU_3/GRUCell/Gates/Linear/Bias" in v.name :
drug_gru_2_gate_bias.append(v)
elif "GRU_3/GRU_3/GRUCell/Candidate/Linear/Bias" in v.name :
drug_gru_2_candidate_bias.append(v)
elif "Embedding_1" in v.name:
drug_embd_W.append(v)
merging = merge([prot_reshape_6, drug_reshape_6], mode='concat', axis=1)
# Fully connected head; TFLearn's leaky-ReLU activation is registered as 'leaky_relu'
fc_1 = fully_connected(merging, 600, activation='leaky_relu', weights_init='xavier', name='fully1')
drop_2 = dropout(fc_1, 0.8)
fc_2 = fully_connected(drop_2, 300, activation='leaky_relu', weights_init='xavier', name='fully2')
drop_3 = dropout(fc_2, 0.8)
linear = fully_connected(drop_3, 1, activation='linear', name='fully3')
reg = regression(linear, optimizer='adam', learning_rate=0.001,
                 loss='mean_square', name='target')
# Training
model = tflearn.DNN(reg, tensorboard_verbose=0, tensorboard_dir='./mytensor/',
                    checkpoint_path='./checkpoints/')
######### Setting weights
model.set_weights(prot_embd_W[0], prot_embd_init)
model.set_weights(prot_gru_1_gate_matrix[0], prot_gru_1_gates_kernel_init)
model.set_weights(prot_gru_1_gate_bias[0], prot_gru_1_gates_bias_init)
model.set_weights(prot_gru_1_candidate_matrix[0], prot_gru_1_candidate_kernel_init)
model.set_weights(prot_gru_1_candidate_bias[0], prot_gru_1_candidate_bias_init)
model.set_weights(prot_gru_2_gate_matrix[0], prot_gru_2_gates_kernel_init)
model.set_weights(prot_gru_2_gate_bias[0], prot_gru_2_gates_bias_init)
model.set_weights(prot_gru_2_candidate_matrix[0], prot_gru_2_candidate_kernel_init)
model.set_weights(prot_gru_2_candidate_bias[0], prot_gru_2_candidate_bias_init)
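
# 3D-convolutional AlexNet-style builder (TFLearn). The snippet below is the
# tail of a model-construction function: the input_data and earlier conv_3d/
# max_pool_3d layers are not shown, and `output` (number of classes) and `lr`
# (learning rate) are assumed to be arguments of that function.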
#network = local_response_normalization(network)
network = conv_3d(network, 384, 3, activation='relu')
network = conv_3d(network, 384, 3, activation='relu')
network = conv_3d(network, 256, 3, activation='relu')
network = max_pool_3d(network, 3, strides=2)
#network = local_response_normalization(network)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, output, activation='softmax')
network = regression(network, optimizer='momentum',
                     loss='categorical_crossentropy',
                     learning_rate=lr, name='targets')
model = tflearn.DNN(network, checkpoint_path='model_alexnet',
                    max_checkpoints=1, tensorboard_verbose=0, tensorboard_dir='log')
return model
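
# GoogLeNet-style network (TFLearn): final inception_5b block and classifier
# head. The earlier inception blocks producing inception_5a_output,
# inception_5b_1_1 and inception_5b_3_3_reduce are not shown; `output` and
# `lr` are assumed to be arguments of the enclosing builder function.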
inception_5b_3_3 = conv_2d(inception_5b_3_3_reduce, 384, filter_size=3, activation='relu', name='inception_5b_3_3')
inception_5b_5_5_reduce = conv_2d(inception_5a_output, 48, filter_size=1, activation='relu', name='inception_5b_5_5_reduce')
inception_5b_5_5 = conv_2d(inception_5b_5_5_reduce, 128, filter_size=5, activation='relu', name='inception_5b_5_5')
inception_5b_pool = max_pool_2d(inception_5a_output, kernel_size=3, strides=1, name='inception_5b_pool')
inception_5b_pool_1_1 = conv_2d(inception_5b_pool, 128, filter_size=1, activation='relu', name='inception_5b_pool_1_1')
inception_5b_output = merge([inception_5b_1_1, inception_5b_3_3, inception_5b_5_5, inception_5b_pool_1_1], axis=3, mode='concat')
pool5_7_7 = avg_pool_2d(inception_5b_output, kernel_size=7, strides=1)
pool5_7_7 = dropout(pool5_7_7, 0.4)
loss = fully_connected(pool5_7_7, output, activation='softmax')
network = regression(loss, optimizer='momentum',
                     loss='categorical_crossentropy',
                     learning_rate=lr, name='targets')
model = tflearn.DNN(network,
                    max_checkpoints=0, tensorboard_verbose=0, tensorboard_dir='log')
return model
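
# Emotion-recognition CNN (TFLearn), defined as a method of a wrapper class.
# SIZE_FACE, EMOTIONS and SAVE_DIRECTORY are assumed to be module-level
# constants, and load_model() another method of the same class.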
def build_network(self):
    # Smaller 'AlexNet'
    # https://github.com/tflearn/tflearn/blob/master/examples/images/alexnet.py
    print('[+] Building CNN')
    self.network = input_data(shape=[None, SIZE_FACE, SIZE_FACE, 1])
    self.network = conv_2d(self.network, 64, 5, activation='relu')
    # self.network = local_response_normalization(self.network)
    self.network = max_pool_2d(self.network, 3, strides=2)
    self.network = conv_2d(self.network, 64, 5, activation='relu')
    self.network = max_pool_2d(self.network, 3, strides=2)
    self.network = conv_2d(self.network, 128, 4, activation='relu')
    self.network = dropout(self.network, 0.3)
    self.network = fully_connected(self.network, 3072, activation='relu')
    self.network = fully_connected(
        self.network, len(EMOTIONS), activation='softmax')
    self.network = regression(
        self.network,
        optimizer='momentum',
        loss='categorical_crossentropy'
    )
    self.model = tflearn.DNN(
        self.network,
        checkpoint_path=SAVE_DIRECTORY + '/emotion_recognition',
        max_checkpoints=1,
        tensorboard_verbose=2
    )
    self.load_model()
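
# Facial-expression network builder (TFLearn), tail of a build-model function:
# optional merge of the image and landmark branches, optimizer selection, and
# the regression layer. NETWORK is assumed to be a configuration object and
# images_network / landmarks_network the branches built earlier; Momentum and
# Adam come from tflearn.optimizers, merge from tflearn.layers.merge_ops.
# The leading if/else is the tail of an enclosing landmark-branch check (not
# shown): with landmarks the two branches are merged, otherwise the image
# branch is used alone.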
    if NETWORK.use_batchnorm_after_fully_connected_layers:
        landmarks_network = batch_normalization(landmarks_network)
    images_network = fully_connected(images_network, 40, activation=NETWORK.activation)
    network = merge([images_network, landmarks_network], 'concat', axis=1)
else:
    network = images_network

network = fully_connected(network, NETWORK.output_size, activation='softmax')

if optimizer == 'momentum':
    optimizer = Momentum(learning_rate=learning_rate, momentum=optimizer_param,
                         lr_decay=learning_rate_decay, decay_step=decay_step)
elif optimizer == 'adam':
    optimizer = Adam(learning_rate=learning_rate, beta1=optimizer_param, beta2=learning_rate_decay)
else:
    print("Unknown optimizer: {}".format(optimizer))

network = regression(network, optimizer=optimizer, loss=NETWORK.loss, learning_rate=learning_rate, name='output')
return network
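
# 3D-convolutional AlexNet-style builder (TFLearn), three-class variant. As
# above, only the tail of the builder function is shown: the input_data and
# first conv_3d layer are assumed to precede the first pooling layer, and `lr`
# to be an argument of that function.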
network = max_pool_3d(network, 3, strides=2)
#network = local_response_normalization(network)
network = conv_3d(network, 256, 5, activation='relu')
network = max_pool_3d(network, 3, strides=2)
#network = local_response_normalization(network)
network = conv_3d(network, 384, 3, activation='relu')
network = conv_3d(network, 384, 3, activation='relu')
network = conv_3d(network, 256, 3, activation='relu')
network = max_pool_3d(network, 3, strides=2)
#network = local_response_normalization(network)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 4096, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 3, activation='softmax')
network = regression(network, optimizer='momentum',
                     loss='categorical_crossentropy',
                     learning_rate=lr, name='targets')
model = tflearn.DNN(network, checkpoint_path='model_alexnet',
                    max_checkpoints=1, tensorboard_verbose=0, tensorboard_dir='log')
return model