from mlblocks.ml_pipeline.ml_pipeline import MLPipeline


class LstmTextRegressor(MLPipeline):
    """LSTM text regression pipeline via Keras."""

    BLOCKS = ['tokenizer', 'sequence_padder', 'lstm_text']

    def __init__(self, optimizer=None, loss=None):
        super(LstmTextRegressor, self).__init__()
        update_params = dict()
        if optimizer is not None:
            update_params[('lstm_text', 'optimizer')] = optimizer
        if loss is not None:
            update_params[('lstm_text', 'loss')] = loss
        self.update_fixed_hyperparams(update_params)
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline
class TraditionalImagePipeline(MLPipeline):
"""Traditional image pipeline using HOG features."""
BLOCKS = ['HOG', 'random_forest_classifier']
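# Hypothetical usage sketch for TraditionalImagePipeline, assuming the MLPipeline
# base class exposes fit/predict (not shown in these snippets); the array shapes
# below are illustrative only.
import numpy as np

X_train = np.random.rand(100, 64, 64)    # 100 grayscale 64x64 images
y_train = np.random.randint(0, 2, 100)   # binary labels

pipeline = TraditionalImagePipeline()
pipeline.fit(X_train, y_train)           # HOG features feed the random forest classifier
predictions = pipeline.predict(X_train)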
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline
class LstmTextClassifier(MLPipeline):
"""LSTM text pipeline via Keras.
From:
http://www.developintelligence.com/blog/2017/06/practical-neural-networks-keras-classifying-yelp-reviews/
""" # noqa
BLOCKS = ['tokenizer', 'sequence_padder', 'lstm_text', 'convert_class_probs']
def __init__(self, num_classes, pad_length=None, optimizer=None, loss=None):
super(LstmTextClassifier, self).__init__()
update_params = {
('lstm_text', 'dense_units'): num_classes,
('lstm_text', 'dense_activation'): 'softmax',
('lstm_text', 'optimizer'): 'keras.optimizers.Adadelta',
('lstm_text', 'loss'): 'keras.losses.categorical_crossentropy'
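# Construction sketch for LstmTextClassifier: the keyword arguments overwrite the
# fixed ('lstm_text', ...) hyperparameters before update_fixed_hyperparams is
# called. 'keras.optimizers.Adam' is an illustrative override in the same string
# style as the defaults above.
classifier = LstmTextClassifier(
    num_classes=5,
    pad_length=200,
    optimizer='keras.optimizers.Adam',
)
# The constructor's pad_length handling is equivalent to passing the tuple-keyed
# dict directly:
classifier.update_fixed_hyperparams({
    ('text_padder', 'pad_length'): 200,
    ('lstm_text', 'pad_length'): 200,
})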
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline
class RandomForestClassifier(MLPipeline):
"""Random forest classifier pipeline."""
BLOCKS = ['random_forest_classifier']
class RandomForestRegressor(MLPipeline):
"""Random forest classifier pipeline."""
BLOCKS = ['random_forest_regressor']
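# The BLOCKS list is the entire definition for these simple pipelines: each entry
# names a primitive block that the MLPipeline base class assembles. A sketch of a
# new single-block pipeline in the same style ('gradient_boosting_classifier' is
# a hypothetical block name, assumed to be registered in the primitive catalog):
class GradientBoostingClassifier(MLPipeline):
    """Gradient boosting classifier pipeline."""

    BLOCKS = ['gradient_boosting_classifier']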
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline


class SimpleCnnRegressor(MLPipeline):
    """Simple CNN regression pipeline."""

    BLOCKS = ['simple_cnn']

    def __init__(self, optimizer=None, loss=None):
        super(SimpleCnnRegressor, self).__init__()
        update_params = {}
        if optimizer is not None:
            update_params[('simple_cnn', 'optimizer')] = optimizer
        if loss is not None:
            update_params[('simple_cnn', 'loss')] = loss
        self.update_fixed_hyperparams(update_params)
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline
class TraditionalTextPipeline(MLPipeline):
"""
Traditional text pipeline.
"""
def __new__(cls, *args, **kwargs):
return MLPipeline.from_ml_json([
'count_vectorizer', 'to_array', 'tfidf_transformer',
'multinomial_nb'
])
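# Note on the pattern above: __new__ returns whatever MLPipeline.from_ml_json
# builds, so calling TraditionalTextPipeline() yields that object directly and
# the class needs no BLOCKS attribute or __init__. A sketch of the assumed
# equivalence (block names taken verbatim from the class above):
text_pipeline = TraditionalTextPipeline()
same_pipeline = MLPipeline.from_ml_json([
    'count_vectorizer', 'to_array', 'tfidf_transformer', 'multinomial_nb'
])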
from mlblocks.ml_pipeline.ml_pipeline import MLPipeline
class SimpleCnnClassifier(MLPipeline):
"""CNN image pipeline.
Based on:
https://github.com/keras-team/keras/blob/master/examples/mnist_cnn.py
Layers:
Conv2D
Conv2D
MaxPooling2D
Dropout
Flatten
Dense
Dropout
Dense
"""
BLOCKS = ['simple_cnn', 'convert_class_probs']
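# Hypothetical end-to-end sketch for SimpleCnnClassifier. The fit/predict calls
# and the MNIST-like input shape are assumptions drawn from the referenced Keras
# example; neither is shown in these snippets.
import numpy as np

X = np.random.rand(32, 28, 28, 1)    # batch of 28x28 single-channel images
y = np.random.randint(0, 10, 32)     # ten digit classes

cnn = SimpleCnnClassifier(num_classes=10)
cnn.fit(X, y)
labels = cnn.predict(X)              # convert_class_probs turns probabilities into labels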