# Sebastian Raschka 2014-2019
# mlxtend Machine Learning Library Extensions
#
# Implementation of the logistic regression algorithm for classification.
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _Classifier
class LogisticRegression(_BaseModel, _IterativeModel, _Classifier):
"""Logistic regression classifier.
Note that this implementation of Logistic Regression
expects binary class labels in {0, 1}.
Parameters
------------
eta : float (default: 0.01)
Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Passes over the training dataset.
Prior to each epoch, the dataset is shuffled
if `minibatches > 1` to prevent cycles in stochastic gradient descent.
l2_lambda : float
Regularization parameter for L2 regularization.
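A minimal usage sketch for the class above; it assumes the public `mlxtend.classifier.LogisticRegression` import path and the `iris_data` helper from `mlxtend.data`, and the parameter values are illustrative only.
# Usage sketch (assumed import paths; values chosen for illustration)
import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import LogisticRegression

X, y = iris_data()
X, y = X[:100, [0, 3]], y[:100]            # two features, labels in {0, 1}
X = (X - X.mean(axis=0)) / X.std(axis=0)   # standardize for faster convergence

lr = LogisticRegression(eta=0.01,
                        epochs=100,
                        l2_lambda=0.0,     # no L2 penalty
                        minibatches=1,     # 1 = full-batch gradient descent
                        random_seed=1)
lr.fit(X, y)
print('Training accuracy: %.2f' % np.mean(lr.predict(X) == y))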
# Sebastian Raschka 2014-2019
# mlxtend Machine Learning Library Extensions
#
# Implementation of the ADAptive LInear NEuron classification algorithm.
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _Classifier
class Adaline(_BaseModel, _IterativeModel, _Classifier):
"""ADAptive LInear NEuron classifier.
Note that this implementation of Adaline expects binary class labels
in {0, 1}.
Parameters
------------
eta : float (default: 0.01)
    Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Passes over the training dataset.
Prior to each epoch, the dataset is shuffled
if `minibatches > 1` to prevent cycles in stochastic gradient descent.
minibatches : int (default: None)
The number of minibatches for gradient-based optimization.
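A hedged usage sketch for Adaline, again assuming the `mlxtend.classifier` import path and `iris_data` from `mlxtend.data`; `minibatches=1` is chosen here for plain full-batch gradient descent.
# Usage sketch (assumed import paths; parameter values are illustrative only)
import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import Adaline

X, y = iris_data()
X, y = X[:100, [0, 3]], y[:100]            # binary subset with labels {0, 1}
X = (X - X.mean(axis=0)) / X.std(axis=0)

ada = Adaline(eta=0.01,
              epochs=50,
              minibatches=1,               # assumed: full-batch gradient descent
              random_seed=1)
ada.fit(X, y)
print('Training accuracy: %.2f' % np.mean(ada.predict(X) == y))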
# Sebastian Raschka 2014-2019
# mlxtend Machine Learning Library Extensions
#
# Implementation of Rosenblatt's perceptron algorithm for classification.
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _Classifier
class Perceptron(_BaseModel, _IterativeModel, _Classifier):
"""Perceptron classifier.
Note that this implementation of the Perceptron expects binary class labels
in {0, 1}.
Parameters
------------
eta : float (default: 0.1)
Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Number of passes over the training dataset.
Prior to each epoch, the dataset is shuffled to prevent cycles.
random_seed : int
Random state for initializing random weights and shuffling.
print_progress : int (default: 0)
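A short usage sketch for the Perceptron; the import paths and the choice of two iris features are assumptions for illustration, not part of the source above.
# Usage sketch (assumed import paths; feature selection is illustrative)
import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import Perceptron

X, y = iris_data()
X, y = X[:100, [0, 3]], y[:100]            # linearly separable {0, 1} subset
X = (X - X.mean(axis=0)) / X.std(axis=0)

ppn = Perceptron(eta=0.1, epochs=50, random_seed=1, print_progress=0)
ppn.fit(X, y)
print('Training accuracy: %.2f' % np.mean(ppn.predict(X) == y))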
# Implementation of Softmax Regression in TensorFlow
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import tensorflow as tf
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _MultiClass
from .._base import _Classifier
class TfSoftmaxRegression(_BaseModel, _IterativeModel, _MultiClass,
_Classifier):
"""Softmax regression classifier.
Parameters
------------
eta : float (default: 0.5)
Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Passes over the training dataset.
Prior to each epoch, the dataset is shuffled
if `minibatches > 1` to prevent cycles in stochastic gradient descent.
n_classes : int (default: None)
A positive integer to declare the number of class labels
if not all class labels are present in a partial training set.
Gets the number of class labels automatically if None.
minibatches : int (default: 1)
Divide the training data into *k* minibatches
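A hedged usage sketch for the TensorFlow-based softmax classifier; the `mlxtend.tf_classifier` import path and TensorFlow 1.x compatibility are assumptions about an older mlxtend release (the TensorFlow estimators are not part of current versions), and the parameter values are illustrative.
# Usage sketch; import path mlxtend.tf_classifier and TF 1.x support are
# assumptions about a legacy mlxtend release, not the current API.
import numpy as np
from mlxtend.data import iris_data
from mlxtend.tf_classifier import TfSoftmaxRegression  # assumed legacy module

X, y = iris_data()                         # three classes: {0, 1, 2}
X = (X - X.mean(axis=0)) / X.std(axis=0)

smx = TfSoftmaxRegression(eta=0.5,
                          epochs=50,
                          n_classes=3,     # or None to infer from y
                          minibatches=5,   # split training data into 5 batches
                          random_seed=1)
smx.fit(X, y)
print('Training accuracy: %.2f' % np.mean(smx.predict(X) == y))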