Skip to content

Commit

Permalink
test_evaluation_scoring: Improve and expand tests
Browse files Browse the repository at this point in the history
  • Loading branch information
lanzagar committed Jun 2, 2017
1 parent 227d491 commit 8e92a28
Showing 1 changed file with 158 additions and 52 deletions.
210 changes: 158 additions & 52 deletions Orange/tests/test_evaluation_scoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,26 +13,175 @@
from Orange.preprocess import discretize, Discretize


class TestPrecision(unittest.TestCase):
    """Tests for the Precision score on iris and on hand-crafted results."""

    @classmethod
    def setUpClass(cls):
        cls.iris = Table('iris')

    def test_precision_iris(self):
        # Train and evaluate on the same data so the scores are deterministic.
        learner = LogisticRegressionLearner(preprocessors=[])
        fit = TestOnTrainingData(self.iris, [learner])
        self.assertAlmostEqual(
            Precision(fit, average='weighted')[0], 0.96189, 5)
        # target=1 must give the same value regardless of the average setting.
        for kwargs in ({'target': 1},
                       {'target': 1, 'average': None},
                       {'target': 1, 'average': 'weighted'}):
            self.assertAlmostEqual(Precision(fit, **kwargs)[0], 0.97826, 5)
        self.assertAlmostEqual(Precision(fit, target=0, average=None)[0], 1, 5)
        self.assertAlmostEqual(
            Precision(fit, target=2, average=None)[0], 0.90741, 5)

    def test_precision_multiclass(self):
        # Two models on a 5-class problem: row 0 is perfect, row 1 errs.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01234")),
            actual=[0, 4, 4, 1, 2, 0, 1, 2, 3, 2])
        results.predicted = np.array([[0, 4, 4, 1, 2, 0, 1, 2, 3, 2],
                                      [0, 1, 4, 1, 1, 0, 0, 2, 3, 1]])
        weighted = Precision(results, average='weighted')
        self.assertEqual(weighted[0], 1.)
        self.assertAlmostEqual(weighted[1], 0.78333, 5)

        # Per-class precision of the imperfect model (the perfect one is 1).
        expected = {0: 2 / 3, 1: 1 / 4, 2: 1 / 1, 3: 1 / 1, 4: 1 / 1}
        for target, prob in expected.items():
            per_class = Precision(results, target=target, average=None)
            self.assertEqual(per_class[0], 1.)
            self.assertEqual(per_class[1], prob)

    def test_precision_binary(self):
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01")),
            actual=[0, 1, 1, 1, 0, 0, 1, 0, 0, 1])
        results.predicted = np.array([[0, 1, 1, 1, 0, 0, 1, 0, 0, 1],
                                      [0, 1, 1, 1, 0, 0, 1, 1, 1, 0]])
        default = Precision(results)
        self.assertEqual(default[0], 1.)
        self.assertAlmostEqual(default[1], 4 / 6)
        # In the binary case, the default must equal target=1 exactly.
        positive = Precision(results, target=1)
        self.assertEqual(default[0], positive[0])
        self.assertEqual(default[1], positive[1])
        negative = Precision(results, target=0)
        self.assertEqual(negative[0], 1.)
        self.assertAlmostEqual(negative[1], 3 / 4)
        macro = Precision(results, average='macro')
        self.assertEqual(macro[0], 1.)
        self.assertAlmostEqual(macro[1], (4 / 6 + 3 / 4) / 2)


class TestRecall(unittest.TestCase):
    """Tests for the Recall score on iris and on hand-crafted results.

    Fixes diff-merge residue: the block contained both the old and the new
    version of ``setUpClass`` and the iris test (two consecutive ``def``
    headers and stale statements referencing ``self.data``), which is not
    valid Python.  Only the new version is kept.
    """

    @classmethod
    def setUpClass(cls):
        cls.iris = Table('iris')

    def test_recall_iris(self):
        # Train and evaluate on the same data so the scores are deterministic.
        learner = LogisticRegressionLearner(preprocessors=[])
        res = TestOnTrainingData(self.iris, [learner])
        self.assertAlmostEqual(Recall(res, average='weighted')[0],
                               0.96, 5)
        # target=1 must give the same value regardless of the average setting.
        self.assertAlmostEqual(Recall(res, target=1)[0], 0.9, 5)
        self.assertAlmostEqual(Recall(res, target=1, average=None)[0],
                               0.9, 5)
        self.assertAlmostEqual(Recall(res, target=1, average='weighted')[0],
                               0.9, 5)
        self.assertAlmostEqual(Recall(res, target=0, average=None)[0], 1, 5)
        self.assertAlmostEqual(Recall(res, target=2, average=None)[0],
                               0.98, 5)

    def test_recall_multiclass(self):
        # Two models on a 5-class problem: row 0 is perfect, row 1 errs.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01234")),
            actual=[0, 4, 4, 1, 2, 0, 1, 2, 3, 2])
        results.predicted = np.array([[0, 4, 4, 1, 2, 0, 1, 2, 3, 2],
                                      [0, 1, 4, 1, 1, 0, 0, 2, 3, 1]])
        res = Recall(results, average='weighted')
        self.assertEqual(res[0], 1.)
        self.assertAlmostEqual(res[1], 0.6)

        # Per-class recall of the imperfect model (the perfect one is 1).
        for target, prob in ((0, 2 / 2),
                             (1, 1 / 2),
                             (2, 1 / 3),
                             (3, 1 / 1),
                             (4, 1 / 2)):
            res = Recall(results, target=target)
            self.assertEqual(res[0], 1.)
            self.assertEqual(res[1], prob)

    def test_recall_binary(self):
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01")),
            actual=[0, 1, 1, 1, 0, 0, 1, 0, 0, 1])
        results.predicted = np.array([[0, 1, 1, 1, 0, 0, 1, 0, 0, 1],
                                      [0, 1, 1, 1, 0, 0, 1, 1, 1, 0]])
        res = Recall(results)
        self.assertEqual(res[0], 1.)
        self.assertAlmostEqual(res[1], 4 / 5)
        # In the binary case, the default must equal target=1 exactly.
        res_target = Recall(results, target=1)
        self.assertEqual(res[0], res_target[0])
        self.assertEqual(res[1], res_target[1])
        res_target = Recall(results, target=0)
        self.assertEqual(res_target[0], 1.)
        self.assertAlmostEqual(res_target[1], 3 / 5)
        res_target = Recall(results, average='macro')
        self.assertEqual(res_target[0], 1.)
        self.assertAlmostEqual(res_target[1], (4 / 5 + 3 / 5) / 2)


class TestF1(unittest.TestCase):
    """Tests for the F1 score on iris and on hand-crafted results.

    Fixes diff-merge residue: the block contained both the old class header
    (``TestPrecision``) and the new one, plus duplicated ``setUpClass`` and
    method headers and stale statements referencing ``self.data``, which is
    not valid Python.  Only the new version is kept.  The iris test was also
    misnamed ``test_recall_iris`` (copy-paste from TestRecall); renamed to
    ``test_f1_iris``.
    """

    @classmethod
    def setUpClass(cls):
        cls.iris = Table('iris')

    def test_f1_iris(self):
        # Train and evaluate on the same data so the scores are deterministic.
        learner = LogisticRegressionLearner(preprocessors=[])
        res = TestOnTrainingData(self.iris, [learner])
        self.assertAlmostEqual(F1(res, average='weighted')[0],
                               0.959935, 5)
        # target=1 must give the same value regardless of the average setting.
        self.assertAlmostEqual(F1(res, target=1)[0], 0.9375, 5)
        self.assertAlmostEqual(F1(res, target=1, average=None)[0],
                               0.9375, 5)
        self.assertAlmostEqual(F1(res, target=1, average='weighted')[0],
                               0.9375, 5)
        self.assertAlmostEqual(F1(res, target=0, average=None)[0], 1, 5)
        self.assertAlmostEqual(F1(res, target=2, average=None)[0],
                               0.942307, 5)

    def test_F1_multiclass(self):
        # Two models on a 5-class problem: row 0 is perfect, row 1 errs.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01234")),
            actual=[0, 4, 4, 1, 2, 0, 1, 2, 3, 2])
        results.predicted = np.array([[0, 4, 4, 1, 2, 0, 1, 2, 3, 2],
                                      [0, 1, 4, 1, 1, 0, 0, 2, 3, 1]])
        res = F1(results, average='weighted')
        self.assertEqual(res[0], 1.)
        self.assertAlmostEqual(res[1], 0.61)

        # Per-class F1 of the imperfect model (the perfect one is 1).
        for target, prob in ((0, 4 / 5),
                             (1, 1 / 3),
                             (2, 1 / 2),
                             (3, 1.),
                             (4, 2 / 3)):
            res = F1(results, target=target)
            self.assertEqual(res[0], 1.)
            self.assertEqual(res[1], prob)

    def test_F1_binary(self):
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01")),
            actual=[0, 1, 1, 1, 0, 0, 1, 0, 0, 1])
        results.predicted = np.array([[0, 1, 1, 1, 0, 0, 1, 0, 0, 1],
                                      [0, 1, 1, 1, 0, 0, 1, 1, 1, 1]])
        res = F1(results)
        self.assertEqual(res[0], 1.)
        self.assertAlmostEqual(res[1], 5 / 6)
        # In the binary case, the default must equal target=1 exactly.
        res_target = F1(results, target=1)
        self.assertEqual(res[0], res_target[0])
        self.assertEqual(res[1], res_target[1])
        res_target = F1(results, target=0)
        self.assertEqual(res_target[0], 1.)
        self.assertAlmostEqual(res_target[1], 3 / 4)


class TestCA(unittest.TestCase):
Expand Down Expand Up @@ -177,48 +326,5 @@ def test_log_loss_calc(self):
self.assertAlmostEqual(ll_calc, ll_orange[0])


# NOTE(review): this is the pre-commit version of TestF1 that the diff above
# removes.  If both are present in one file, this later definition shadows the
# expanded TestF1 defined earlier — confirm it is deleted when the commit lands.
# Here model row 0 is the imperfect one and row 1 is perfect (the refactored
# tests swap that order).
class TestF1(unittest.TestCase):
    def test_F1_multiclass(self):
        # Two models on a 5-class problem: row 0 errs, row 1 is perfect.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01234")),
            actual=[0, 4, 4, 1, 2, 0, 1, 2, 3, 2])
        results.predicted = np.array([[0, 1, 4, 1, 1, 0, 0, 2, 3, 1],
                                      [0, 4, 4, 1, 2, 0, 1, 2, 3, 2]])
        res = F1(results)
        self.assertAlmostEqual(res[0], 0.61)
        self.assertEqual(res[1], 1.)

    def test_F1_target(self):
        # Per-class F1 of the imperfect model; the perfect model scores 1.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01234")),
            actual=[0, 4, 4, 1, 2, 0, 1, 2, 3, 2])
        results.predicted = np.array([[0, 1, 4, 1, 1, 0, 0, 2, 3, 1],
                                      [0, 4, 4, 1, 2, 0, 1, 2, 3, 2]])

        for target, prob in ((0, 4 / 5),
                             (1, 1 / 3),
                             (2, 1 / 2),
                             (3, 1.),
                             (4, 2 / 3)):
            res = F1(results, target=target)
            self.assertEqual(res[0], prob)
            self.assertEqual(res[1], 1.)

    def test_F1_binary(self):
        # Binary case: the default score must match target=1 exactly.
        results = Results(
            domain=Domain([], DiscreteVariable(name="y", values="01")),
            actual=[0, 1, 1, 1, 0, 0, 1, 0, 0, 1])
        results.predicted = np.array([[0, 1, 1, 1, 0, 0, 1, 0, 0, 1],
                                      [0, 1, 1, 1, 0, 0, 1, 1, 1, 1]])
        res = F1(results)
        self.assertEqual(res[0], 1.)
        self.assertAlmostEqual(res[1], 5 / 6)
        res_target = F1(results, target=1)
        self.assertEqual(res[0], res_target[0])
        self.assertEqual(res[1], res_target[1])
        res_target = F1(results, target=0)
        self.assertEqual(res_target[0], 1.)
        self.assertAlmostEqual(res_target[1], 3 / 4)

# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()

0 comments on commit 8e92a28

Please sign in to comment.