Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def monte_carlo_lasso(first_stage=lambda: WeightedLasso(alpha = 0.01,
fit_intercept = True,
tol = 1e-6, random_state = 123), folder='lasso'):
n_exp = 1000
# Assumes that X has already been offset
n_samples, n_features = X.shape
# Special case: n_features=1
if n_features == 1:
C_hat = np.ones((1, 1))
tausq = (X.T @ X / n_samples).flatten()
return np.diag(1 / tausq) @ C_hat
coefs = np.empty((n_features, n_features - 1))
tausq = np.empty(n_features)
# Compute Lasso coefficients for the columns of the design matrix
for i in range(n_features):
y = X[:, i]
X_reduced = X[:, list(range(i)) + list(range(i + 1, n_features))]
# Call weighted lasso on reduced design matrix
# Inherit some parameters from the parent
local_wlasso = WeightedLasso(
alpha=self.alpha,
fit_intercept=False,
max_iter=self.max_iter,
tol=self.tol
).fit(X_reduced, y, sample_weight=sample_weight)
coefs[i] = local_wlasso.coef_
# Weighted tau
if sample_weight is not None:
y_weighted = y * sample_weight / np.sum(sample_weight)
else:
y_weighted = y / n_samples
tausq[i] = np.dot(y - local_wlasso.predict(X_reduced), y_weighted)
# Compute C_hat
C_hat = np.diag(np.ones(n_features))
C_hat[0][1:] = -coefs[0]
for i in range(1, n_features):
Target. Will be cast to X's dtype if necessary
sample_weight : numpy array of shape [n_samples]
Individual weights for each sample.
The weights will be normalized internally.
"""
# Make weighted splitter
cv_temp = self.cv
self.cv = _weighted_check_cv(self.cv).split(X, y, sample_weight=sample_weight)
# Fit weighted model
self._fit_weighted_linear_model(X, y, sample_weight)
self.cv = cv_temp
return self
class DebiasedLasso(WeightedLasso):
"""Debiased Lasso model.
Implementation was derived from <https://arxiv.org/abs/1303.0518>.
Only implemented for single-dimensional output.
Parameters
----------
alpha : string | float, optional, default 'auto'.
Constant that multiplies the L1 term. Defaults to 'auto'.
``alpha = 0`` is equivalent to an ordinary least square, solved
by the :class:`LinearRegression` object. For numerical
reasons, using ``alpha = 0`` with the ``Lasso`` object is not advised.
Given this, you should use the :class:`.LinearRegression` object.
fit_intercept : boolean, optional, default True