modelFeatures = pd.read_csv(filepath_or_buffer=inputCSVFilePath, usecols=SMARTparameters)
modelLabel = pd.read_csv(filepath_or_buffer=inputCSVFilePath, usecols=['failure'])  # inputCSVFilePath is e.g. "/hadoop/elephas/Output/ST4000DM000.csv"
# Replace NaN values in the input DataFrames with zeros
modelFeatures = modelFeatures.fillna(0)
modelLabel = modelLabel.fillna(0)
# Obtain the 3D training and testing vectors
(feature_train, label_train), (feature_test, label_test) = lstm.train_test_split(modelFeatures, modelLabel, trainSize, timeSteps)
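# For reference (illustrative, not part of the pipeline): Keras LSTM layers
# expect 3D input shaped (num_samples, timeSteps, num_features), i.e.
#   feature_train.shape == (N, timeSteps, len(SMARTparameters))
# The windowing itself is implemented in the project's `lstm` helper module.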
# Check whether failure cases exist in the data
if len(feature_train) == 0:
    print("DiskModel has no failure elements. Training of the model cannot proceed!")
    return
# Initialize the Adam optimizer for Elephas
adam = elephas_optimizers.Adam()
print("Adam Optimizer initialized")
# Convert the training DataFrames into a Spark RDD
rddataset = to_simple_rdd(sc, feature_train, label_train)
print("Training data converted into Resilient Distributed Dataset")
# Initialize the SparkModel with the optimizer, master-worker mode and number of workers
spark_model = SparkModel(sc, lstmModel, optimizer=adam, frequency='epoch', mode='asynchronous', num_workers=2)
print("Spark Model Initialized")
# Initial training run of the model
spark_model.train(rddataset, nb_epoch=10, batch_size=200, verbose=1, validation_split=0)
# Evaluate the model on the held-out test vectors
score = spark_model.evaluate(feature_test, label_test, show_accuracy=True)
# Retrain until the evaluation score exceeds 0.5, re-evaluating after each
# pass so the loop can actually terminate
while score <= 0.5:
    # Train on the input data set
    spark_model.train(rddataset, nb_epoch=10, batch_size=200, verbose=1, validation_split=0)
    score = spark_model.evaluate(feature_test, label_test, show_accuracy=True)
print("LSTM model training done!")
    # (Excerpt from the elephas Adam optimizer class; the class definition
    # line itself is not part of this excerpt.)
    def __init__(self, lr=0.001, beta_1=0.9, beta_2=0.999,
                 epsilon=1e-8, *args, **kwargs):
        super(Adam, self).__init__(**kwargs)
        self.__dict__.update(locals())
        self.iterations = 0
        self.lr = lr

    # The body of get_updates is elided in this excerpt; its signature is
    # assumed from the old Keras-0.x-style optimizer interface elephas mirrors.
    def get_updates(self, params, constraints, grads):
        # ... update computation elided ...
        return new_weights

    def get_config(self):
        return {"class_name": self.__class__.__name__,
                "lr": float(self.lr),
                "beta_1": self.beta_1,
                "beta_2": self.beta_2,
                "epsilon": self.epsilon}
# aliases
sgd = SGD
rmsprop = RMSprop
adagrad = Adagrad
adadelta = Adadelta
adam = Adam
def serialize(optimizer):
    return serialize_keras_object(optimizer)
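# Usage sketch (hypothetical round trip; assumes serialize_keras_object /
# deserialize_keras_object are imported from Keras in this module):
#   config = serialize(Adam(lr=0.001))   # -> config dict for the optimizer
#   optimizer = deserialize(config)      # -> an equivalent Adam instance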
def deserialize(config, custom_objects=None):
"""Inverse of the `serialize` function.
# Arguments
config: Optimizer configuration dictionary.
custom_objects: Optional dictionary mapping
names (strings) to custom objects
(classes and functions)
to be considered during deserialization.
# Returns
A Keras Optimizer instance.
"""Inverse of the `serialize` function.
# Arguments
config: Optimizer configuration dictionary.
custom_objects: Optional dictionary mapping
names (strings) to custom objects
(classes and functions)
to be considered during deserialization.
# Returns
A Keras Optimizer instance.
"""
all_classes = {
'sgd': SGD,
'rmsprop': RMSprop,
'adagrad': Adagrad,
'adadelta': Adadelta,
'adam': Adam
}
# Make deserialization case-insensitive for built-in optimizers.
if config['class_name'].lower() in all_classes:
config['class_name'] = config['class_name'].lower()
return deserialize_keras_object(config,
module_objects=all_classes,
custom_objects=custom_objects,
printable_module_name='optimizer')
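# Usage sketch (illustrative config): the lowercasing above makes the lookup
# case-insensitive, so a mixed-case name still resolves to a built-in class:
#   opt = deserialize({'class_name': 'ADAM', 'config': {'lr': 0.001}})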