[MRG] Add ChangedBehaviorWarning message for LMNN too #214

Merged
27 changes: 22 additions & 5 deletions metric_learn/lmnn.py
@@ -17,6 +17,7 @@
import warnings
from collections import Counter
from six.moves import xrange
from sklearn.exceptions import ChangedBehaviorWarning
from sklearn.metrics import euclidean_distances
from sklearn.base import TransformerMixin

@@ -26,18 +27,20 @@

# commonality between LMNN implementations
class _base_LMNN(MahalanobisMixin, TransformerMixin):
def __init__(self, init='auto', k=3, min_iter=50, max_iter=1000,
def __init__(self, init=None, k=3, min_iter=50, max_iter=1000,
learn_rate=1e-7, regularization=0.5, convergence_tol=0.001,
use_pca=True, verbose=False, preprocessor=None,
n_components=None, num_dims='deprecated', random_state=None):
"""Initialize the LMNN object.

Parameters
----------
init : string or numpy array, optional (default='auto')
init : None, string or numpy array, optional (default=None)
Initialization of the linear transformation. Possible options are
'auto', 'pca', 'lda', 'identity', 'random', and a numpy array of shape
(n_features_a, n_features_b).
'auto', 'pca', 'identity', 'random', and a numpy array of shape
(n_features_a, n_features_b). If None, will be set automatically to
'auto' (this option is to raise a warning if 'init' is not set,
and stays to its default value None, in v0.5.0).

'auto'
Depending on ``n_components``, the most reasonable initialization
@@ -135,7 +138,21 @@ def fit(self, X, y):
if len(label_inds) != num_pts:
raise ValueError('Must have one label per point.')
self.labels_ = np.arange(len(unique_labels))
self.transformer_ = _initialize_transformer(output_dim, X, y, self.init,

# if the init is the default (None), we raise a warning
if self.init is None:
# TODO: replace init=None by init='auto' in v0.6.0 and remove the warning
msg = ("Warning, no init was set (`init=None`). As of version 0.5.0, "
"the default init will now be set to 'auto', instead of the "
"previous identity matrix. If you still want to use the identity "
"matrix as before, set init='identity'. This warning "
"will disappear in v0.6.0, and `init` parameter's default value "
"will be set to 'auto'.")
warnings.warn(msg, ChangedBehaviorWarning)
init = 'auto'
else:
init = self.init
self.transformer_ = _initialize_transformer(output_dim, X, y, init,
self.verbose,
self.random_state)
required_k = np.bincount(label_inds).min()
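Not part of the diff: a minimal usage sketch of how a caller could avoid the new ChangedBehaviorWarning by choosing `init` explicitly, assuming the post-0.5.0 metric_learn API described above; the toy `X`/`y` arrays mirror the test added below in this PR.

```python
import numpy as np
from metric_learn import LMNN

# Toy data copied from the test added in this PR.
X = np.array([[0, 0], [0, 1], [2, 0], [2, 1]])
y = np.array([1, 0, 1, 0])

# Passing `init` explicitly skips the `self.init is None` branch above,
# so no ChangedBehaviorWarning is emitted.
LMNN(init='identity', k=2).fit(X, y)  # keep the pre-0.5.0 identity init
LMNN(init='auto', k=2).fit(X, y)      # opt in to the new default early
```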
2 changes: 1 addition & 1 deletion metric_learn/lsml.py
@@ -94,7 +94,7 @@ def _fit(self, quadruplets, weights=None):
else:
self.w_ = weights
self.w_ /= self.w_.sum() # weights must sum to 1
# if the prior is the default (identity), we raise a warning just in case
# if the prior is the default (None), we raise a warning
if self.prior is None:
msg = ("Warning, no prior was set (`prior=None`). As of version 0.5.0, "
"the default prior will now be set to "
2 changes: 1 addition & 1 deletion metric_learn/mlkr.py
@@ -156,7 +156,7 @@ def fit(self, X, y):
m = self.n_components
if m is None:
m = d
# if the init is the default (identity), we raise a warning just in case
# if the init is the default (None), we raise a warning
if self.init is None:
# TODO:
# replace init=None by init='auto' in v0.6.0 and remove the warning
12 changes: 6 additions & 6 deletions metric_learn/nca.py
@@ -141,16 +141,16 @@ def fit(self, X, y):
train_time = time.time()

# Initialize A
# if the init is the default (auto), we raise a warning just in case
# if the init is the default (None), we raise a warning
if self.init is None:
# TODO: replace init=None by init='auto' in v0.6.0 and remove the warning
msg = ("Warning, no init was set (`init=None`). As of version 0.5.0, "
"the default init will now be set to 'auto', instead of the "
"previous scaling matrix. same scaling matrix as before as an "
"init, set init=np.eye(X.shape[1])/"
"(np.maximum(X.max(axis=0)-X.min(axis=0), EPS))). This warning "
"will disappear in v0.6.0, and `init` parameter's default value "
"will be set to 'auto'.")
"previous scaling matrix. If you still want to use the same "
"scaling matrix as before, set "
"init=np.eye(X.shape[1])/(np.maximum(X.max(axis=0)-X.min(axis=0)"
", EPS))). This warning will disappear in v0.6.0, and `init` "
"parameter's default value will be set to 'auto'.")
warnings.warn(msg, ChangedBehaviorWarning)
init = 'auto'
else:
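Not part of the diff: a hedged sketch of the explicit initialization that the reworded NCA message points users to, for keeping the pre-0.5.0 scaling behaviour. `EPS` is assumed here to be NumPy's float64 machine epsilon (metric_learn keeps its own small constant), and the toy data is made up.

```python
import numpy as np
from metric_learn import NCA

EPS = np.finfo(np.float64).eps  # assumption: stand-in for metric_learn's small constant

X = np.array([[0., 0.], [0., 1.], [2., 0.], [2., 1.]])
y = np.array([1, 0, 1, 0])

# The pre-0.5.0 default: a diagonal scaling by the inverse per-feature range,
# i.e. the expression quoted in the warning message.
scaling_init = np.eye(X.shape[1]) / np.maximum(X.max(axis=0) - X.min(axis=0), EPS)

NCA(init=scaling_init).fit(X, y)  # explicit init, so no ChangedBehaviorWarning
```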
2 changes: 1 addition & 1 deletion metric_learn/sdml.py
@@ -111,7 +111,7 @@ def _fit(self, pairs, y):
type_of_inputs='tuples')

# set up (the inverse of) the prior M
# if the prior is the default (identity), we raise a warning just in case
# if the prior is the default (None), we raise a warning
if self.prior is None:
# TODO:
# replace prior=None by prior='identity' in v0.6.0 and remove the
27 changes: 22 additions & 5 deletions test/metric_learn_test.py
@@ -275,6 +275,23 @@ def grad(x):
np.linalg.norm(approx_fprime(L.ravel(), fun, epsilon)))
np.testing.assert_almost_equal(rel_diff, 0., decimal=5)

def test_changed_behaviour_warning(self):
# test that a ChangedBehavior warning is thrown about the init, if the
# default parameters are used.
# TODO: remove in v.0.6
X = np.array([[0, 0], [0, 1], [2, 0], [2, 1]])
y = np.array([1, 0, 1, 0])
lmnn = LMNN(k=2)
msg = ("Warning, no init was set (`init=None`). As of version 0.5.0, "
"the default init will now be set to 'auto', instead of the "
"previous identity matrix. If you still want to use the identity "
"matrix as before, set init='identity'. This warning "
"will disappear in v0.6.0, and `init` parameter's default value "
"will be set to 'auto'.")
with pytest.warns(ChangedBehaviorWarning) as raised_warning:
lmnn.fit(X, y)
assert any(msg == str(wrn.message) for wrn in raised_warning)


@pytest.mark.parametrize('X, y, loss', [(np.array([[0], [1], [2], [3]]),
[1, 1, 0, 0], 3.0),
@@ -744,11 +761,11 @@ def test_changed_behaviour_warning(self):
nca = NCA()
msg = ("Warning, no init was set (`init=None`). As of version 0.5.0, "
"the default init will now be set to 'auto', instead of the "
"previous scaling matrix. same scaling matrix as before as an "
"init, set init=np.eye(X.shape[1])/"
"(np.maximum(X.max(axis=0)-X.min(axis=0), EPS))). This warning will"
" disappear in v0.6.0, and `init` parameter's default value will "
"be set to 'auto'.")
"previous scaling matrix. If you still want to use the same "
"scaling matrix as before, set "
"init=np.eye(X.shape[1])/(np.maximum(X.max(axis=0)-X.min(axis=0)"
", EPS))). This warning will disappear in v0.6.0, and `init` "
"parameter's default value will be set to 'auto'.")
with pytest.warns(ChangedBehaviorWarning) as raised_warning:
nca.fit(X, y)
assert any(msg == str(wrn.message) for wrn in raised_warning)
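Not part of this PR: a sketch of how downstream code could temporarily silence the new message while migrating, using only the standard-library `warnings` filters; the toy data again mirrors the LMNN test above.

```python
import warnings

import numpy as np
from sklearn.exceptions import ChangedBehaviorWarning
from metric_learn import LMNN

X = np.array([[0, 0], [0, 1], [2, 0], [2, 1]])
y = np.array([1, 0, 1, 0])

with warnings.catch_warnings():
    # Ignore only this category; other warnings stay visible.
    warnings.simplefilter('ignore', category=ChangedBehaviorWarning)
    LMNN(k=2).fit(X, y)  # default init=None, warning suppressed
```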
2 changes: 1 addition & 1 deletion test/test_base_metric.py
@@ -21,7 +21,7 @@ def test_covariance(self):
def test_lmnn(self):
self.assertRegexpMatches(
str(metric_learn.LMNN()),
r"(python_)?LMNN\(convergence_tol=0.001, init='auto', k=3, "
r"(python_)?LMNN\(convergence_tol=0.001, init=None, k=3, "
r"learn_rate=1e-07,\s+"
r"max_iter=1000, min_iter=50, n_components=None, "
r"num_dims='deprecated',\s+preprocessor=None, random_state=None, "