Code inspired by Matlab code from Cyril Pernet and Guillaume
Rousselet [1]_.
Requires scikit-learn.
References
----------
.. [1] Pernet CR, Wilcox R, Rousselet GA. Robust Correlation Analyses:
False Positive and Power Validation Using a New Open Source Matlab
Toolbox. Frontiers in Psychology. 2012;3:606.
doi:10.3389/fpsyg.2012.00606.
"""
# Check that sklearn is installed
from pingouin.utils import _is_sklearn_installed
_is_sklearn_installed(raise_error=True)
import numpy as np
from scipy.stats import chi2
from sklearn.covariance import MinCovDet
X = np.column_stack((x, y))
nrows, ncols = X.shape
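# Outlier cutoff: square root of the 97.5% quantile of a chi-square
# distribution with 2 degrees of freedom (approximately 2.72)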
gval = np.sqrt(chi2.ppf(0.975, 2))
# Compute center and distance to center
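# MinCovDet gives a robust, outlier-resistant estimate of the bivariate location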
center = MinCovDet(random_state=42).fit(X).location_
B = X - center
B2 = B**2
bot = B2.sum(axis=1)
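# bot[i] is the squared Euclidean distance of observation i from the robust center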
# Loop over rows
dis = np.zeros(shape=(nrows, nrows))
for i in np.arange(nrows):
    if bot[i] != 0:
        # Distance of every observation along the projection defined by point i
        dis[i, :] = np.linalg.norm(B * B2[i, :] / bot[i], axis=1)
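# The rest of the skipped correlation is not shown in this snippet. Below is a
# minimal sketch of the usual next steps, assuming a MAD-based cutoff and a
# Spearman correlation on the retained observations (the toolbox described in
# [1] also supports an interquartile-range based cutoff and Pearson correlation):
from scipy.stats import spearmanr, median_abs_deviation

# Flag observation j as an outlier if, for any projection i, its distance
# exceeds that projection's robust threshold: median + gval * MAD
mad = median_abs_deviation(dis, axis=1, scale='normal')
thresh = np.median(dis, axis=1) + gval * mad
outliers = (dis > thresh[:, None]).any(axis=0)

# Correlate the non-outlying observations
r, pval = spearmanr(X[~outliers, 0], X[~outliers, 1])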
4. Return only the coefficients
>>> logistic_regression(X, y, coef_only=True)
array([-0.36736745, -0.04374684, -0.47829392])
5. Passing custom parameters to sklearn
>>> lom = logistic_regression(X, y, solver='sag', max_iter=10000,
... random_state=42)
>>> print(lom['coef'].values)
[-0.36751796 -0.04367056 -0.47841908]
"""
# Check that sklearn is installed
from pingouin.utils import _is_sklearn_installed
_is_sklearn_installed(raise_error=True)
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
# Extract predictor names if X is a pandas DataFrame or Series
if isinstance(X, pd.DataFrame):
    names = X.keys().tolist()
elif isinstance(X, pd.Series):
    names = [X.name]
else:
    names = []
# Convert to numpy array
X = np.asarray(X)
y = np.asarray(y)
assert y.ndim == 1, 'y must be one-dimensional.'
assert 0 < alpha < 1, 'alpha must be between 0 and 1.'
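# What typically comes next (a minimal sketch, not pingouin's exact
# implementation): fit an effectively unpenalized sklearn LogisticRegression,
# then derive Wald standard errors, z-values, p-values and confidence
# intervals from the inverse of the observed information matrix X'WX, with
# W = diag(p * (1 - p)). Here `kwargs` is assumed to hold the keyword
# arguments forwarded to sklearn (e.g. solver, max_iter, random_state in the
# docstring example above).
from scipy.stats import norm

lom = LogisticRegression(C=1e9, **kwargs).fit(X, y)  # very large C ~ no penalty
coef = np.append(lom.intercept_, lom.coef_.ravel())

# Design matrix with an intercept column, matching the coefficient vector
X_design = np.column_stack((np.ones(X.shape[0]), X))
p = lom.predict_proba(X)[:, 1]
cov = np.linalg.pinv((X_design * (p * (1 - p))[:, None]).T @ X_design)
se = np.sqrt(np.diag(cov))

z = coef / se
pvals = 2 * norm.sf(np.abs(z))
crit = norm.ppf(1 - alpha / 2)
ci_lower, ci_upper = coef - crit * se, coef + crit * se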