Merge pull request #1653 from VesnaT/confusion_matrix_output
[FIX] OWConfusionMatrix: Output None when no data is selected
astaric authored Oct 13, 2016
2 parents c597742 + 3285d5d commit 282b1b0
Showing 2 changed files with 62 additions and 43 deletions.
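
The gist of the fix: OWConfusionMatrix.commit() used to build and send a "Selected Data" table even when no cells of the confusion matrix were selected; the selection is now checked first, and None is sent instead. A minimal standalone sketch of that guard — the function and variable names here are illustrative only, not the widget's own code:

import numpy

def commit_selection(table, selected_rows):
    """Return the selected rows of `table`, or None when the selection is empty."""
    if selected_rows:                          # the guard this commit adds
        return table[numpy.array(selected_rows)]
    return None                                # empty selection -> output None

table = numpy.arange(12).reshape(4, 3)
print(commit_selection(table, [0, 2]))         # two selected rows
print(commit_selection(table, []))             # None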
76 changes: 39 additions & 37 deletions Orange/widgets/evaluate/owconfusionmatrix.py
@@ -324,44 +324,46 @@ def commit(self):
             predicted = self.results.predicted[self.selected_learner[0]]
             selected = [i for i, t in enumerate(zip(actual, predicted))
                         if t in indices]
-            row_indices = self.results.row_indices[selected]
-
-            extra = []
-            class_var = self.data.domain.class_var
-            metas = self.data.domain.metas
-
-            if self.append_predictions:
-                predicted = numpy.array(predicted[selected], dtype=object)
-                extra.append(predicted.reshape(-1, 1))
-                var = Orange.data.DiscreteVariable(
-                    "{}({})".format(class_var.name, learner_name),
-                    class_var.values
+            if selected:
+                row_indices = self.results.row_indices[selected]
+                extra = []
+                class_var = self.data.domain.class_var
+                metas = self.data.domain.metas
+
+                if self.append_predictions:
+                    predicted = numpy.array(predicted[selected], dtype=object)
+                    extra.append(predicted.reshape(-1, 1))
+                    var = Orange.data.DiscreteVariable(
+                        "{}({})".format(class_var.name, learner_name),
+                        class_var.values
+                    )
+                    metas = metas + (var,)
+
+                if self.append_probabilities and \
+                        self.results.probabilities is not None:
+                    probs = self.results.probabilities[self.selected_learner[0],
+                                                       selected]
+                    extra.append(numpy.array(probs, dtype=object))
+                    pvars = [Orange.data.ContinuousVariable("p({})".format(value))
+                             for value in class_var.values]
+                    metas = metas + tuple(pvars)
+
+                X = self.data.X[row_indices]
+                Y = self.data.Y[row_indices]
+                M = self.data.metas[row_indices]
+                row_ids = self.data.ids[row_indices]
+
+                M = numpy.hstack((M,) + tuple(extra))
+                domain = Orange.data.Domain(
+                    self.data.domain.attributes,
+                    self.data.domain.class_vars,
+                    metas
                 )
-                metas = metas + (var,)
-
-            if self.append_probabilities and \
-                    self.results.probabilities is not None:
-                probs = self.results.probabilities[self.selected_learner[0],
-                                                   selected]
-                extra.append(numpy.array(probs, dtype=object))
-                pvars = [Orange.data.ContinuousVariable("p({})".format(value))
-                         for value in class_var.values]
-                metas = metas + tuple(pvars)
-
-            X = self.data.X[row_indices]
-            Y = self.data.Y[row_indices]
-            M = self.data.metas[row_indices]
-            row_ids = self.data.ids[row_indices]
-
-            M = numpy.hstack((M,) + tuple(extra))
-            domain = Orange.data.Domain(
-                self.data.domain.attributes,
-                self.data.domain.class_vars,
-                metas
-            )
-            data = Orange.data.Table.from_numpy(domain, X, Y, M)
-            data.ids = row_ids
-            data.name = learner_name
+                data = Orange.data.Table.from_numpy(domain, X, Y, M)
+                data.ids = row_ids
+                data.name = learner_name
+            else:
+                data = None
 
         else:
             data = None
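For context on the branch that now runs only when something is selected: it assembles an Orange table whose metas carry the predicted class (and, optionally, the class probabilities) for the selected rows, and commit() then presumably hands the result — or None — to the "Selected Data" output that the tests below read back. A small standalone illustration of that table assembly, assuming an installed Orange3, the iris dataset, and a made-up learner name "majority" (the dataset and learner name are assumptions for the example, not part of this commit):

import numpy
import Orange.data

iris = Orange.data.Table("iris")
class_var = iris.domain.class_var
selected = [0, 1, 50]                          # pretend these rows were selected

# predicted class indices for the selected rows, appended as a meta column
predicted = numpy.array([0, 0, 1], dtype=object).reshape(-1, 1)
var = Orange.data.DiscreteVariable(
    "{}({})".format(class_var.name, "majority"), class_var.values)

domain = Orange.data.Domain(iris.domain.attributes,
                            iris.domain.class_vars,
                            iris.domain.metas + (var,))
metas = numpy.hstack((iris.metas[selected], predicted))
data = Orange.data.Table.from_numpy(
    domain, iris.X[selected], iris.Y[selected], metas)
print(len(data), [m.name for m in data.domain.metas])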
29 changes: 23 additions & 6 deletions Orange/widgets/evaluate/tests/test_owconfusionmatrix.py
@@ -6,7 +6,8 @@
 from Orange.widgets.evaluate.owconfusionmatrix import OWConfusionMatrix
 from Orange.widgets.tests.base import WidgetTest
 
-class TestOWClassificationTree(WidgetTest):
+
+class TestOWConfusionMatrix(WidgetTest):
     @classmethod
     def setUpClass(cls):
         super().setUpClass()
@@ -28,14 +29,30 @@ def test_selected_learner(self):
         """Check learner and model for various values of all parameters
         when pruning parameters are not checked
         """
-        self.widget.set_results(self.results_2_iris)
+        self.send_signal("Evaluation Results", self.results_2_iris)
         self.assertEqual(self.widget.selected_learner, [0])
         self.widget.selected_learner[:] = [1]
-        self.widget.set_results(self.results_2_titanic)
+        self.send_signal("Evaluation Results", self.results_2_titanic)
         self.widget.selected_learner[:] = [1]
-        self.widget.set_results(self.results_1_iris)
+        self.send_signal("Evaluation Results", self.results_1_iris)
         self.widget.selected_learner[:] = [0]
-        self.widget.set_results(None)
-        self.widget.set_results(self.results_1_iris)
+        self.send_signal("Evaluation Results", None)
+        self.send_signal("Evaluation Results", self.results_1_iris)
         self.widget.selected_learner[:] = [0]
 
+    def test_outputs(self):
+        self.send_signal("Evaluation Results", self.results_1_iris)
+
+        # check selected data output
+        self.assertIsNone(self.get_output("Selected Data"))
+
+        # select data instances
+        self.widget.select_correct()
+
+        # check selected data output
+        selected = self.get_output("Selected Data")
+        self.assertGreater(len(selected), 0)
+
+        # check output when data is removed
+        self.send_signal("Evaluation Results", None)
+        self.assertIsNone(self.get_output("Selected Data"))
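To exercise the new test locally, the module can be run through the standard unittest loader; this assumes a development checkout of Orange3 with its Qt test dependencies available (the invocation is not part of this commit):

import unittest

# Load and run only the confusion-matrix widget tests touched by this commit.
suite = unittest.defaultTestLoader.loadTestsFromName(
    "Orange.widgets.evaluate.tests.test_owconfusionmatrix")
unittest.TextTestRunner(verbosity=2).run(suite)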