How to use the diffprivlib.utils.DiffprivlibCompatibilityWarning class in diffprivlib

To help you get started, we’ve selected a few diffprivlib examples, based on popular ways it is used in public projects.

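DiffprivlibCompatibilityWarning is the warning diffprivlib emits when it receives scikit-learn-style parameters it does not use. If you understand the warning and prefer to silence it rather than remove the offending parameter, here is a minimal sketch using the standard library's warning filters (the GaussianNB call mirrors the test further down; the epsilon and bounds arguments follow the library version used in these tests):

import warnings

import numpy as np

from diffprivlib.models import GaussianNB
from diffprivlib.utils import DiffprivlibCompatibilityWarning

# Silence diffprivlib's compatibility warnings for this process.
warnings.filterwarnings("ignore", category=DiffprivlibCompatibilityWarning)

X = np.random.random((10, 2))
y = np.random.randint(2, size=10)

# sample_weight is not functional in diffprivlib; without the filter above,
# this fit would emit a DiffprivlibCompatibilityWarning.
clf = GaussianNB(epsilon=1, bounds=[(0, 1), (0, 1)])
clf.fit(X, y, sample_weight=np.ones(10))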

github IBM / differential-privacy-library / tests / models / test_KMeans.py
def test_sample_weights(self):
        clf = KMeans(30, [(0, 1)], 3)

        X = np.array([0.1, 0.1, 0.1, 0.1, 0.5, 0.5, 0.5, 0.5, 0.9, 0.9, 0.9]).reshape(-1, 1)
        with self.assertWarns(DiffprivlibCompatibilityWarning):
            clf.fit(X, None, 1)
github IBM / differential-privacy-library / tests / models / test_LinearRegression.py
def test_sample_weight_warning(self):
        clf = LinearRegression(data_norm=5.5, range_X=5, range_y=1)

        X = np.array(
            [0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50, 2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75,
             5.00, 5.50])
        y = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
        X = X[:, np.newaxis]

        with self.assertWarns(DiffprivlibCompatibilityWarning):
            clf.fit(X, y, sample_weight=np.ones_like(y))
github IBM / differential-privacy-library / tests / models / test_LogisticRegression.py
def test_sample_weight_warning(self):
        X = np.array(
            [0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50, 2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75,
             5.00, 5.50])
        y = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
        X = X[:, np.newaxis]

        clf = LogisticRegression(data_norm=5.5)

        with self.assertWarns(DiffprivlibCompatibilityWarning):
            clf.fit(X, y, sample_weight=np.ones_like(y))
github IBM / differential-privacy-library / tests / models / test_GaussianNB.py
def test_sample_weight_warning(self):
        X = np.random.random((10, 2))
        y = np.random.randint(2, size=10)
        clf = GaussianNB(epsilon=1, bounds=[(0, 1), (0, 1)])
        w = abs(np.random.randn(10))

        with self.assertWarns(DiffprivlibCompatibilityWarning):
            clf.fit(X, y, sample_weight=w)
github IBM / differential-privacy-library / tests / models / test_LogisticRegression.py
def test_solver_warning(self):
        with self.assertWarns(DiffprivlibCompatibilityWarning):
            LogisticRegression(solver="newton-cg")
github IBM / differential-privacy-library / tests / models / test_KMeans.py
def test_unused_args(self):
        with self.assertWarns(DiffprivlibCompatibilityWarning):
            KMeans(verbose=1)
github IBM / differential-privacy-library / tests / models / test_PCA.py
def test_solver_warning(self):
        with self.assertWarns(DiffprivlibCompatibilityWarning):
            PCA(svd_solver='full')
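
All of the tests above use unittest's assertWarns. Outside a TestCase, the same check can be written with warnings.catch_warnings; a minimal sketch mirroring the KMeans(verbose=1) test above:

import warnings

from diffprivlib.models import KMeans
from diffprivlib.utils import DiffprivlibCompatibilityWarning

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    KMeans(verbose=1)  # verbose is not functional in diffprivlib

assert any(issubclass(w.category, DiffprivlibCompatibilityWarning) for w in caught)
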
github IBM / differential-privacy-library / diffprivlib / utils.py
    Parameters
    ----------
    args : str, list or dict
        Arguments for which warnings should be thrown.

    Returns
    -------
    None

    """
    if isinstance(args, str):
        args = [args]

    if not isinstance(args, (dict, list)):
        raise ValueError("args must be a string, a list of strings or a dictionary, got type '%s'." % type(args))

    for arg in args:
        warnings.warn("Parameter '%s' is not functional in diffprivlib.  Remove this parameter to suppress this "
                      "warning." % arg, DiffprivlibCompatibilityWarning)
github IBM / differential-privacy-library / diffprivlib / models / logistic_regression.py
def _check_multi_class(multi_class, solver, n_classes):
    del solver, n_classes

    if multi_class != 'ovr':
        warnings.warn("For diffprivlib, multi_class must be 'ovr'.", DiffprivlibCompatibilityWarning)
        multi_class = 'ovr'

    return multi_class
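
_check_multi_class shows the fallback directly: any value other than 'ovr' triggers the warning and is coerced back to 'ovr'. A quick sketch exercising it (this imports a private helper from diffprivlib.models.logistic_regression, so the import path may change between versions):

import warnings

from diffprivlib.models.logistic_regression import _check_multi_class

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    multi_class = _check_multi_class("multinomial", solver="lbfgs", n_classes=3)

print(multi_class)  # 'ovr' - diffprivlib only supports one-vs-rest
print(len(caught))  # 1 DiffprivlibCompatibilityWarning was recorded
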
github IBM / differential-privacy-library / diffprivlib / models / logistic_regression.py
del class_weight
    if sample_weight is not None:
        warnings.warn("For diffprivlib, sample_weight is not used. Set to None to suppress this warning.",
                      DiffprivlibCompatibilityWarning)
        del sample_weight
    if intercept_scaling != 1.:
        warnings.warn("For diffprivlib, intercept_scaling is not used. Set to 1.0 to suppress this warning.",
                      DiffprivlibCompatibilityWarning)
        del intercept_scaling
    if max_squared_sum is not None:
        warnings.warn("For diffprivlib, max_squared_sum is not used. Set to None to suppress this warning.",
                      DiffprivlibCompatibilityWarning)
        del max_squared_sum
    if random_state is not None:
        warnings.warn("For diffprivlib, random_state is not used. Set to None to suppress this warning.",
                      DiffprivlibCompatibilityWarning)
        del random_state

    if isinstance(Cs, numbers.Integral):
        Cs = np.logspace(-4, 4, int(Cs))

    solver = _check_solver(solver, penalty, dual)

    # Data norm increases if intercept is included
    if fit_intercept:
        data_norm = np.sqrt(data_norm ** 2 + 1)

    # Pre-processing.
    if check_input:
        X = check_array(X, accept_sparse='csr', dtype=np.float64, accept_large_sparse=solver != 'liblinear')
        y = check_array(y, ensure_2d=False, dtype=None)
        check_consistent_length(X, y)
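
The data_norm adjustment above accounts for the constant feature appended when an intercept is fitted: each row's L2 norm bound grows from data_norm to sqrt(data_norm ** 2 + 1). A quick numeric check:

import numpy as np

data_norm = 5.5  # user-supplied norm bound, as in the tests above
# Appending a constant 1 for the intercept raises the per-row norm bound.
print(np.sqrt(data_norm ** 2 + 1))  # ~5.59, the effective norm used downstream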