Skip to content
Merged
Show file tree
Hide file tree
Changes from 39 commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
88e49ae
Test warnings: Fix warning about old-style widget messages
janezd Dec 22, 2018
4f8ec03
Test warnings: Rename widgets' self.info to self.infolabel
janezd Dec 22, 2018
c5a6c98
Test warnings: Register DummyComponent as provider for test widget
janezd Dec 22, 2018
4ed3208
Test warnings: Mock and test logging instead of printing it out
janezd Dec 22, 2018
2f48790
Test warnings: rename assertEquals to assertEqual
janezd Dec 22, 2018
57e60e7
Test warnings: OWDataSampler, use randint instead of random_integer, …
janezd Dec 22, 2018
53767bb
Test warnings: Remove warnings related to empty or all-nan matrices
janezd Dec 22, 2018
7317da2
Test warnings: Remove warnings in OWRank
janezd Dec 22, 2018
a5097b8
OWCalibrationPlot: Remove division by zero in smoothing
janezd Dec 23, 2018
b37590c
SGD: Set tol to 1e-3 (future default from 0.21) to avoid warnings
janezd Dec 23, 2018
a8d9bdf
Test warnings: Remove convergence warnings in tests for OWCalibration
janezd Dec 23, 2018
30f0b9e
Test warnings: Remove division by zero in confusion matrix
janezd Dec 23, 2018
364de14
Test warnings: Silence warning about ill-defined F-score and precision
janezd Dec 23, 2018
f6c8312
Test warnings: Remove selecting class value: widget.data is None at t…
janezd Dec 23, 2018
bb86133
Test warnings: Fix deprecated indexing in test_owlogisticregression.py
janezd Dec 23, 2018
919468f
Test warnings: Test logged error in report test instead of printing i…
janezd Dec 23, 2018
16cb108
Test warnings: Ignore division by zero in owcorrespondence
janezd Dec 23, 2018
a171cb2
Test warnings: Silence warning in construction of infinite distances …
janezd Dec 23, 2018
f1ad559
MDS: Don't set n_init to 4 if init_data is given
janezd Dec 24, 2018
6b4e0cd
Test warnings: Silence matrix deprecation warning in scipy
janezd Dec 24, 2018
93f135d
Test warnings: Silence convergence warnings in tests for widgets of n…
janezd Dec 24, 2018
1a1234f
Test warnings: Fix or silence warning in MDS tests
janezd Dec 24, 2018
2adcf6c
Scatterplotgraph: Faster construction of continuous palette if all co…
janezd Jan 17, 2019
e8aeee6
OWManifoldLearning: capture and show warning about disconnected graph…
janezd Dec 24, 2018
b92a126
OWtSNE: treat all-nan columns as constant in check_data
janezd Dec 24, 2018
c318db6
Test warnings: Silence division by zero warning in PCA
janezd Dec 24, 2018
ef82ba3
Box plot: Fix divisions by zero in statistical tests
janezd Dec 24, 2018
5af6da7
OWDistributions: Fix arguments in smoothing (float -> int)
janezd Dec 24, 2018
d64d773
Orange.distance: silence warning about unhandled nans that are handle…
janezd Dec 24, 2018
598bbb8
OWSieve: Fix division by zero
janezd Dec 24, 2018
28799b8
Heatmap tests: Silence warnings about convergence
janezd Dec 24, 2018
f9b1edc
Test concurrent: Silence warning that the class (we're testing) is ob…
janezd Dec 24, 2018
03055fb
Test warnings: Remove prints
janezd Dec 24, 2018
bd289f6
Tests: Stop report window and some other widgets from showing
janezd Dec 26, 2018
7dd696f
owfeaturestatistics: Change deprecated locale.format -> format_string
lanzagar Jan 11, 2019
7884e2c
Fix warnings in widget tests: Follow suggestions in review (1)
janezd Jan 11, 2019
8950666
data.utils.histogram: Fix warning for means of empty bins
janezd Jan 11, 2019
3395a0c
Venn Diagram: Silence warning about nans
janezd Dec 26, 2018
23d674d
manifold.MDS: Change the signature back to same as skl, fix n_init in…
janezd Jan 11, 2019
c5f8118
fixup! Test warnings: Remove warnings in OWRank
janezd Jan 18, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Orange/classification/sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ class SGDClassificationLearner(SklLearner):

def __init__(self, loss='hinge', penalty='l2', alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, max_iter=5,
tol=None, shuffle=True, epsilon=0.1, random_state=None,
tol=1e-3, shuffle=True, epsilon=0.1, random_state=None,
learning_rate='invscaling', eta0=0.01, power_t=0.25,
warm_start=False, average=False, preprocessors=None):
super().__init__(preprocessors=preprocessors)
Expand Down
2 changes: 1 addition & 1 deletion Orange/data/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -983,7 +983,7 @@ def collect(attr):
vstack(collect("metas")),
merge1d(collect("W"))
)
conc.ids = np.hstack(map(operator.attrgetter("ids"), tables))
conc.ids = np.hstack([table.ids for table in tables])
names = [table.name for table in tables if table.name != "untitled"]
if names:
conc.name = names[0]
Expand Down
17 changes: 9 additions & 8 deletions Orange/distance/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,14 +232,15 @@ def __call__(self, e1, e2=None):

x1 = _orange_to_numpy(e1)
x2 = _orange_to_numpy(e2)
dist = self.compute_distances(x1, x2)
if self.impute and np.isnan(dist).any():
dist = np.nan_to_num(dist)
if isinstance(e1, (Table, RowInstance)):
dist = DistMatrix(dist, e1, e2, self.axis)
else:
dist = DistMatrix(dist)
return dist
with np.errstate(invalid="ignore"): # nans are handled below
dist = self.compute_distances(x1, x2)
if self.impute and np.isnan(dist).any():
dist = np.nan_to_num(dist)
if isinstance(e1, (Table, RowInstance)):
dist = DistMatrix(dist, e1, e2, self.axis)
else:
dist = DistMatrix(dist)
return dist

def compute_distances(self, x1, x2):
"""
Expand Down
2 changes: 1 addition & 1 deletion Orange/regression/linear.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ class SGDRegressionLearner(LinearRegressionLearner):
preprocessors = SklLearner.preprocessors + [Normalize()]

def __init__(self, loss='squared_loss', penalty='l2', alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, max_iter=5, tol=None,
l1_ratio=0.15, fit_intercept=True, max_iter=5, tol=1e-3,
shuffle=True, epsilon=0.1, n_jobs=1, random_state=None,
learning_rate='invscaling', eta0=0.01, power_t=0.25,
class_weight=None, warm_start=False, average=False,
Expand Down
9 changes: 8 additions & 1 deletion Orange/statistics/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

It also patches bottleneck to contain these functions.
"""
import warnings
from warnings import warn

import bottleneck as bn
Expand Down Expand Up @@ -353,7 +354,10 @@ def weighted_mean():

def _nan_min_max(x, func, axis=0):
if not sp.issparse(x):
return func(x, axis=axis)
with warnings.catch_warnings():
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This currently still changes the behaviour of nanmin/nanmax compared to numpy's version.
I thought we agreed that for numpy arrays our functions should be equivalent to np.nanmin/max

warnings.filterwarnings("ignore", ".*All-NaN slice encountered.*",
RuntimeWarning)
return func(x, axis=axis)
if axis is None:
extreme = func(x.data, axis=axis) if x.nnz else float('nan')
if sparse_has_implicit_zeros(x):
Expand Down Expand Up @@ -423,6 +427,9 @@ def nanmean(x, axis=None):
""" Equivalent of np.nanmean that supports sparse or dense matrices. """
def nanmean_sparse(x):
    """Mean over all elements of a sparse matrix ``x``, ignoring nans.

    Implicit zeros count as values; explicit nans are excluded.
    Returns nan (and emits a RuntimeWarning, matching np.nanmean's
    behaviour on an all-nan slice) when every element is nan.
    """
    # Total elements minus the explicitly-stored nans; implicit
    # zeros are valid values and stay in the denominator.
    n_values = np.prod(x.shape) - np.sum(np.isnan(x.data))
    if not n_values:
        # Bug fix: arguments were swapped (warn(RuntimeWarning, "msg")),
        # which raises TypeError because the category must be a Warning
        # subclass. Message comes first, category second.
        warnings.warn("Mean of empty slice", RuntimeWarning)
        return np.nan
    return np.nansum(x.data) / n_values

return _apply_func(x, np.nanmean, nanmean_sparse, axis=axis)
Expand Down
7 changes: 7 additions & 0 deletions Orange/tests/test_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,11 @@
import unittest

import traceback
import warnings

import numpy as np
from sklearn.exceptions import ConvergenceWarning

from Orange.base import SklLearner

import Orange.classification
Expand Down Expand Up @@ -269,6 +273,9 @@ class LearnerAccessibility(unittest.TestCase):

def setUp(self):
    """Reset variable caches and mute irrelevant convergence warnings."""
    Variable._clear_all_caches()
    # These tests only probe learner accessibility, not model quality,
    # so sklearn's ConvergenceWarning is expected noise.
    warnings.filterwarnings("ignore", category=ConvergenceWarning)


def all_learners(self):
classification_modules = pkgutil.walk_packages(
Expand Down
8 changes: 8 additions & 0 deletions Orange/tests/test_neural_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@
# pylint: disable=missing-docstring

import unittest
import warnings

from sklearn.exceptions import ConvergenceWarning

from Orange.data import Table
from Orange.classification import NNClassificationLearner
Expand All @@ -17,6 +20,11 @@ def setUpClass(cls):
cls.housing = Table('housing')
cls.learner = NNLearner()

def setUp(self):
    """Silence sklearn's ConvergenceWarning before each test runs."""
    # The networks here are deliberately under-trained, so convergence
    # warnings are expected and not a sign of test failure.
    warnings.filterwarnings("ignore", category=ConvergenceWarning)
    super().setUp()

def test_NN_classification(self):
results = CrossValidation(self.iris, [NNClassificationLearner()], k=3)
ca = CA(results)
Expand Down
12 changes: 12 additions & 0 deletions Orange/tests/test_sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
# pylint: disable=missing-docstring

import unittest
import warnings

import numpy as np
from sklearn.exceptions import ConvergenceWarning

from Orange.data import Table
from Orange.classification import SGDClassificationLearner
Expand All @@ -12,6 +14,11 @@


class TestSGDRegressionLearner(unittest.TestCase):
def setUp(self):
    """Ignore convergence warnings — they are irrelevant for these tests."""
    # SGD with few iterations rarely converges on random data; that is
    # fine here, so suppress the resulting ConvergenceWarning.
    warnings.filterwarnings("ignore", category=ConvergenceWarning)
    super().setUp()

def test_SGDRegression(self):
nrows, ncols = 500, 5
X = np.random.rand(nrows, ncols)
Expand All @@ -32,6 +39,11 @@ class TestSGDClassificationLearner(unittest.TestCase):
def setUpClass(cls):
cls.iris = Table('iris')

def setUp(self):
    """Suppress sklearn convergence warnings for every test in this case."""
    # Convergence is not what these tests assert on, so the warning is
    # just noise in the test output.
    warnings.filterwarnings("ignore", category=ConvergenceWarning)
    super().setUp()

def test_SGDClassification(self):
sgd = SGDClassificationLearner()
res = CrossValidation(self.iris, [sgd], k=3)
Expand Down
4 changes: 4 additions & 0 deletions Orange/tests/test_svm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
# pylint: disable=missing-docstring

import unittest
import warnings

import numpy as np
from sklearn.exceptions import ConvergenceWarning

from Orange.classification import (SVMLearner, LinearSVMLearner,
NuSVMLearner, OneClassSVMLearner)
Expand All @@ -24,6 +26,8 @@ def test_SVM(self):
self.assertGreater(CA(res)[0], 0.9)

def test_LinearSVM(self):
    """Linear SVM should reach CA > 0.8 in 2-fold cross-validation."""
    # Convergence is not under test here, so the warning is irrelevant.
    warnings.filterwarnings("ignore", category=ConvergenceWarning)
    learner = LinearSVMLearner()
    results = CrossValidation(self.data, [learner], k=2)
    self.assertGreater(CA(results)[0], 0.8)
Expand Down
2 changes: 1 addition & 1 deletion Orange/widgets/data/owdatasampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -378,7 +378,7 @@ def __init__(self, n=0, stratified=False, replace=False,
def __call__(self, table):
if self.replace:
rgen = np.random.RandomState(self.random_state)
sample = rgen.random_integers(0, len(table) - 1, self.n)
sample = rgen.randint(0, len(table), self.n)
o = np.ones(len(table))
o[sample] = 0
others = np.nonzero(o)[0]
Expand Down
4 changes: 2 additions & 2 deletions Orange/widgets/data/owfeaturestatistics.py
Original file line number Diff line number Diff line change
Expand Up @@ -556,9 +556,9 @@ def data(self, index, role):
elif output in (np.inf, -np.inf):
output = '%s∞' % ['', '-'][output < 0]
elif isinstance(output, int):
output = locale.format('%d', output, grouping=True)
output = locale.format_string('%d', output, grouping=True)
elif isinstance(output, float):
output = locale.format('%.2f', output, grouping=True)
output = locale.format_string('%.2f', output, grouping=True)

return output

Expand Down
6 changes: 3 additions & 3 deletions Orange/widgets/data/owfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def __init__(self):
url_combo.activated.connect(self._url_set)

box = gui.vBox(self.controlArea, "Info")
self.info = gui.widgetLabel(box, 'No data loaded.')
self.infolabel = gui.widgetLabel(box, 'No data loaded.')
self.warnings = gui.widgetLabel(box, '')

box = gui.widgetBox(self.controlArea, "Columns (Double click to edit)")
Expand Down Expand Up @@ -304,7 +304,7 @@ def load_data(self):
self.data = None
self.sheet_box.hide()
self.Outputs.data.send(None)
self.info.setText("No data.")
self.infolabel.setText("No data.")

def _try_load(self):
# pylint: disable=broad-except
Expand Down Expand Up @@ -335,7 +335,7 @@ def _try_load(self):
if warnings:
self.Warning.load_warning(warnings[-1].message.args[0])

self.info.setText(self._describe(data))
self.infolabel.setText(self._describe(data))

self.loaded_file = self.last_path()
add_origin(data, self.loaded_file)
Expand Down
2 changes: 1 addition & 1 deletion Orange/widgets/data/owpaintdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -795,7 +795,7 @@ def __init__(self):
self.class_model.rowsInserted.connect(self._class_count_changed)
self.class_model.rowsRemoved.connect(self._class_count_changed)

if not self.data:
if self.data is None or not len(self.data):
self.data = []
self.__buffer = np.zeros((0, 3))
elif isinstance(self.data, np.ndarray):
Expand Down
22 changes: 12 additions & 10 deletions Orange/widgets/data/owrank.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
Rank (score) features for prediction.

"""

import warnings
from collections import namedtuple, OrderedDict
import logging
from functools import partial
Expand Down Expand Up @@ -143,7 +143,10 @@ def headerData(self, section, orientation, role=Qt.DisplayRole):
def setExtremesFrom(self, column, values):
"""Set extremes for columnn's ratio bars from values"""
try:
vmin = np.nanmin(values)
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore", ".*All-NaN slice encountered.*", RuntimeWarning)
vmin = np.nanmin(values)
if np.isnan(vmin):
raise TypeError
except TypeError:
Expand Down Expand Up @@ -368,31 +371,30 @@ def set_learner(self, scorer, id):

@memoize_method()
def get_method_scores(self, method):
# These errors often happen, but they result in nans, which
# are handled correctly by the widget
estimator = method.scorer()
data = self.data
try:
scores = np.asarray(estimator(data))
except ValueError:
log.warning("Scorer %s wasn't able to compute all scores at once",
method.name)
try:
scores = np.array([estimator(data, attr)
for attr in data.domain.attributes])
except ValueError:
log.error(
"Scorer %s wasn't able to compute scores at all",
method.name)
log.error("%s doesn't work on this data", method.name)
scores = np.full(len(data.domain.attributes), np.nan)
else:
log.warning("%s had to be computed separately for each "
"variable", method.name)
return scores

@memoize_method()
def get_scorer_scores(self, scorer):
try:
scores = scorer.scorer.score_data(self.data).T
except ValueError:
log.error(
"Scorer %s wasn't able to compute scores at all",
scorer.name)
log.error("%s doesn't work on this data", scorer.name)
scores = np.full((len(self.data.domain.attributes), 1), np.nan)

labels = ((scorer.shortname,)
Expand Down
4 changes: 2 additions & 2 deletions Orange/widgets/data/tests/test_owconcatenate.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,12 +79,12 @@ def get_source():
# test adding source
self.widget.controls.append_source_column.toggle()
source = get_source()
self.assertEquals(source.name, "Source ID")
self.assertEqual(source.name, "Source ID")
# test name changing
self.widget.controls.source_attr_name.setText("Source")
self.widget.controls.source_attr_name.callback()
source = get_source()
self.assertEquals(source.name, "Source")
self.assertEqual(source.name, "Source")
# test source_column role
places = ["class_vars", "attributes", "metas"]
for i, place in enumerate(places):
Expand Down
7 changes: 3 additions & 4 deletions Orange/widgets/data/tests/test_owdatasampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def select_sampling_type(self, sampling_type):
def test_no_intersection_in_outputs(self):
""" Check whether outputs intersect and whether length of outputs sums
to length of original data """
self.send_signal("Data", self.zoo)
self.send_signal("Data", self.iris)
w = self.widget
sampling_types = [w.FixedProportion, w.FixedSize, w.CrossValidation]

Expand All @@ -78,7 +78,7 @@ def test_no_intersection_in_outputs(self):

sample = self.get_output("Data Sample")
other = self.get_output("Remaining Data")
self.assertEqual(len(self.zoo), len(sample) + len(other))
self.assertEqual(len(self.iris), len(sample) + len(other))
self.assertNoIntersection(sample, other)

def test_bigger_size_with_replacement(self):
Expand Down Expand Up @@ -114,5 +114,4 @@ def set_fixed_sample_size(self, sample_size, with_replacement=False):
return self.widget.sampleSizeSpin.value()

def assertNoIntersection(self, sample, other):
    """Assert that two tables share no rows, compared by row ids.

    Uses set.isdisjoint on the id sets instead of per-instance
    membership tests, which is both clearer and O(n + m).
    """
    self.assertTrue(set(sample.ids).isdisjoint(other.ids))
5 changes: 4 additions & 1 deletion Orange/widgets/data/tests/test_owfeaturestatistics.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import datetime
import warnings
from collections import namedtuple
from functools import wraps, partial
from itertools import chain
Expand All @@ -17,7 +18,6 @@

VarDataPair = namedtuple('VarDataPair', ['variable', 'data'])


# Continuous variable variations
continuous_full = VarDataPair(
ContinuousVariable('continuous_full'),
Expand Down Expand Up @@ -192,6 +192,9 @@ def setUp(self):
self.widget = self.create_widget(
OWFeatureStatistics, stored_settings={'auto_commit': False}
)
# scipy.sparse uses matrix; this filter can be removed when it stops
warnings.filterwarnings(
"ignore", ".*the matrix subclass.*", PendingDeprecationWarning)

def force_render_table(self):
"""Some fields e.g. histograms are only initialized when they actually
Expand Down
6 changes: 4 additions & 2 deletions Orange/widgets/data/tests/test_owfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ def test_file_not_found(self):
self.assertEqual(file_name, path.basename(self.widget.last_path()))
self.assertTrue(self.widget.Error.file_not_found.is_shown())
self.assertIsNone(self.get_output(self.widget.Outputs.data))
self.assertEqual(self.widget.info.text(), "No data.")
self.assertEqual(self.widget.infolabel.text(), "No data.")

# Open a sample dataset
self.open_dataset("iris")
Expand Down Expand Up @@ -285,8 +285,10 @@ def test_with_warnings(self):
self.assertTrue(self.widget.Warning.load_warning.is_shown())

def test_fail(self):
    """Loading a malformed .tab file logs the exception and shows Error.unknown."""
    # The file is intentionally broken (a value where the header ends),
    # so the reader must fail; we assert the failure is logged rather
    # than printed, and surfaced on the widget.
    with named_file("name\nc\n\nstring", suffix=".tab") as fn, \
            patch("Orange.widgets.data.owfile.log.exception") as log:
        self.open_dataset(fn)
        log.assert_called()
        self.assertTrue(self.widget.Error.unknown.is_shown())

def test_read_format(self):
Expand Down
Loading