"""This file is part of the TPOT library.

The TPOT library is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.

The TPOT library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along
with the TPOT library. If not, see http://www.gnu.org/licenses/.
"""
from .base import Regressor
from sklearn.ensemble import ExtraTreesRegressor
class TPOTExtraTreesRegressor(Regressor):
    """Fits an Extra Trees Regressor

    Parameters
    ----------
    max_features: float
        The number of features to consider when looking for the best split
    """
    import_hash = {'sklearn.ensemble': ['ExtraTreesRegressor']}
    sklearn_class = ExtraTreesRegressor
    arg_types = (float, )

    def __init__(self):
        pass
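
# Illustrative sketch (not part of the TPOT source): a plausible way the single
# float argument could be sanitized and handed to the underlying estimator.
# The clamping bounds and n_estimators value below are assumptions.
from sklearn.ensemble import ExtraTreesRegressor

max_features = min(1., max(0.01, 0.73))  # hypothetical GP-supplied value, clamped into (0, 1]
regressor = ExtraTreesRegressor(max_features=max_features, n_estimators=100)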
from .base import Regressor
from xgboost import XGBRegressor
class TPOTXGBRegressor(Regressor):
    """Fits an XGBoost Regressor

    Parameters
    ----------
    max_depth: int
        Maximum tree depth for base learners
    min_child_weight: int
        Minimum sum of instance weight (hessian) needed in a child
    learning_rate: float
        Shrinks the contribution of each tree by learning_rate
    subsample: float
        Subsample ratio of the training instances
    """
    import_hash = {'xgboost': ['XGBRegressor']}
    sklearn_class = XGBRegressor
    arg_types = (int, int, float, float)
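
# Illustrative sketch (not part of the TPOT source): the four arg_types
# (int, int, float, float) line up with the parameters documented above.
# The concrete values are assumptions chosen only for illustration.
from xgboost import XGBRegressor

regressor = XGBRegressor(max_depth=5, min_child_weight=3,
                         learning_rate=0.1, subsample=0.8)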
from ...gp_types import Bool
from .base import Regressor
from sklearn.svm import LinearSVR
class TPOTLinearSVR(Regressor):
    """Fits a Linear Support Vector Regressor

    Parameters
    ----------
    C: float
        Penalty parameter C of the error term.
    dual: bool
        Select the algorithm to either solve the dual or primal optimization problem.
    """
    import_hash = {'sklearn.svm': ['LinearSVR']}
    sklearn_class = LinearSVR
    arg_types = (float, Bool)

    def __init__(self):
        pass
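
# Illustrative sketch (not part of the TPOT source): C is assumed to be forced
# positive before construction; dual is passed through to choose between the
# dual and primal formulations, as documented above.
from sklearn.svm import LinearSVR

C = max(0.0001, 1.0)  # hypothetical lower bound on the GP-supplied value
regressor = LinearSVR(C=C, dual=True)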
from .base import Regressor
from sklearn.neighbors import KNeighborsRegressor
class TPOTKNeighborsRegressor(Regressor):
    """Fits a k-nearest neighbor Regressor

    Parameters
    ----------
    n_neighbors: int
        Number of neighbors to use by default for k_neighbors queries; must be a positive value
    weights: int
        Selects a value from the list: ['uniform', 'distance']
    """
    import_hash = {'sklearn.neighbors': ['KNeighborsRegressor']}
    sklearn_class = KNeighborsRegressor
    arg_types = (int, int)

    def __init__(self):
        pass
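
# Illustrative sketch (not part of the TPOT source): the integer weights
# argument is assumed to index into ['uniform', 'distance'], and n_neighbors is
# forced to be positive as the docstring requires. The raw values are illustrative.
from sklearn.neighbors import KNeighborsRegressor

weights_options = ['uniform', 'distance']
n_neighbors = max(1, 5)
weights = weights_options[7 % len(weights_options)]  # hypothetical GP value 7 -> 'distance'
regressor = KNeighborsRegressor(n_neighbors=n_neighbors, weights=weights)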
from .base import Regressor
from sklearn.ensemble import RandomForestRegressor
class TPOTRandomForestRegressor(Regressor):
    """Fits a random forest Regressor.

    Parameters
    ----------
    None
    """
    import_hash = {'sklearn.ensemble': ['RandomForestRegressor']}
    sklearn_class = RandomForestRegressor
    arg_types = ()

    def __init__(self):
        pass

    def preprocess_args(self):
        ...
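
# Illustrative sketch (not part of the TPOT source): with an empty arg_types,
# preprocess_args presumably returns only fixed keyword arguments. The value
# below is an assumption, not TPOT's actual default.
from sklearn.ensemble import RandomForestRegressor

regressor = RandomForestRegressor(n_estimators=100)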
from .base import Regressor
from sklearn.ensemble import GradientBoostingRegressor
class TPOTGradientBRegressor(Regressor):
    """Fits a Gradient Boosting Regressor

    Parameters
    ----------
    learning_rate: float
        Shrinks the contribution of each tree by learning_rate
    max_features: float
        Maximum number of features to use (proportion of total features)
    """
    import_hash = {'sklearn.ensemble': ['GradientBoostingRegressor']}
    sklearn_class = GradientBoostingRegressor
    arg_types = (float, float)

    def __init__(self):
        pass
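
# Illustrative sketch (not part of the TPOT source): both floats are assumed to
# be clamped before construction, with max_features interpreted as a proportion
# of the total number of features as documented above.
from sklearn.ensemble import GradientBoostingRegressor

learning_rate = min(1., max(0.0001, 0.1))
max_features = min(1., max(0.01, 0.5))
regressor = GradientBoostingRegressor(learning_rate=learning_rate,
                                      max_features=max_features)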
from ...gp_types import Bool
from .base import Regressor
from sklearn.linear_model import LassoLarsCV
class TPOTLassoLarsCV(Regressor):
    """Fits a LassoLarsCV Regressor

    Parameters
    ----------
    normalize: bool
        If True, the regressors X will be normalized before regression.
    """
    import_hash = {'sklearn.linear_model': ['LassoLarsCV']}
    sklearn_class = LassoLarsCV
    arg_types = (Bool, )

    def __init__(self):
        pass

    def preprocess_args(self, normalize):
        ...
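
# Illustrative sketch (not part of the TPOT source): the single Bool argument is
# assumed to be passed straight through as the normalize keyword. Note that
# normalize was later deprecated and removed from scikit-learn's LassoLarsCV, so
# this reflects the older scikit-learn API that TPOT targeted at the time.
from sklearn.linear_model import LassoLarsCV

regressor = LassoLarsCV(normalize=True)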
from .base import Regressor
from sklearn.linear_model import ElasticNet
class TPOTElasticNet(Regressor):
    """Fits an Elastic Net Regressor

    Parameters
    ----------
    alpha: float
        Constant that multiplies the penalty terms.
    l1_ratio: float
        The ElasticNet mixing parameter, with 0 <= l1_ratio <= 1
    """
    import_hash = {'sklearn.linear_model': ['ElasticNet']}
    sklearn_class = ElasticNet
    arg_types = (float, float)

    def __init__(self):
        pass
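
# Illustrative sketch (not part of the TPOT source): l1_ratio is assumed to be
# clamped into [0, 1] and alpha forced positive before constructing the
# estimator. The raw values are illustrative only.
from sklearn.linear_model import ElasticNet

alpha = max(0.0001, 1.0)
l1_ratio = min(1., max(0., 0.5))
regressor = ElasticNet(alpha=alpha, l1_ratio=l1_ratio)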