# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Implementation of the multinomial logistic regression algorithm for
# classification.
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _MultiClass
from .._base import _Classifier
class SoftmaxRegression(_BaseModel, _IterativeModel,
_Classifier, _MultiClass):
"""Softmax regression classifier.
Parameters
------------
eta : float (default: 0.01)
Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Passes over the training dataset.
Prior to each epoch, the dataset is shuffled
if `minibatches > 1` to prevent cycles in stochastic gradient descent.
l2 : float
Regularization parameter for L2 regularization.
No regularization if l2=0.0.
minibatches : int (default: 1)
# Sebastian Raschka 2014-2019
# mlxtend Machine Learning Library Extensions
#
# Base Regressor (Regressor Parent Class)
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _Regressor
class LinearRegression(_BaseModel, _IterativeModel, _Regressor):
""" Ordinary least squares linear regression.
Parameters
------------
method : string (default: 'direct')
For gradient descent-based optimization, use `sgd` (see `minibatch`
parameter for further options). Otherwise, if `direct` (default),
the analytical method is used. For alternative, numerically more
    stable solutions, use either `qr` (QR decomposition) or `svd`
(Singular Value Decomposition).
eta : float (default: 0.01)
solver learning rate (between 0.0 and 1.0). Used with `method =`
`'sgd'`. (See `methods` parameter for details)
epochs : int (default: 50)
Passes over the training dataset.
# Sebastian Raschka 2014-2019
# mlxtend Machine Learning Library Extensions
#
# Principal Component Analysis for dimensionality reduction.
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import numpy as np
from scipy.spatial import distance
from .._base import _BaseModel
class RBFKernelPCA(_BaseModel):
"""
RBF Kernel Principal Component Analysis for dimensionality reduction.
Parameters
----------
gamma : float (default: 15.0)
Free parameter (coefficient) of the RBF kernel.
n_components : int (default: None)
The number of principal components for transformation.
Keeps the original dimensions of the dataset if `None`.
copy_X : bool (default: True)
Copies training data, which is required to compute the projection
of new data via the transform method. Uses a reference to X if False.
Attributes
----------
# Implementation of a Multi-layer Perceptron in Tensorflow
# Author: Sebastian Raschka
#
# License: BSD 3 clause
import tensorflow as tf
import numpy as np
from time import time
from .._base import _BaseModel
from .._base import _IterativeModel
from .._base import _MultiClass
from .._base import _MultiLayer
from .._base import _Classifier
class TfMultiLayerPerceptron(_BaseModel, _IterativeModel,
_MultiClass, _MultiLayer, _Classifier):
"""Multi-layer perceptron classifier.
Parameters
------------
eta : float (default: 0.5)
Learning rate (between 0.0 and 1.0)
epochs : int (default: 50)
Passes over the training dataset.
Prior to each epoch, the dataset is shuffled
if `minibatches > 1` to prevent cycles in stochastic gradient descent.
hidden_layers : list (default: [50, 10])
Number of units per hidden layer. By default 50 units in the
first hidden layer, and 10 hidden units in the second hidden layer.
n_classes : int (default: None)
A positive integer to declare the number of class labels