annotate tests/test_logistic_regression.py @ 59:161030d8cd4e
require tensorflow
| author | Jeff Hammel <k0scist@gmail.com> |
|---|---|
| date | Sat, 02 Dec 2017 10:21:18 -0800 |
| parents | 0f29b02f4806 |
| children | |
| rev | line source | 
|---|---|
| 11 | 1 #!/usr/bin/env python |
| 11 | 2 |
| 11 | 3 """ |
| 11 | 4 test logistic regression |
| 11 | 5 """ |
| 11 | 6 |
| 22 | 7 import numpy as np |
| 11 | 8 import os |
| 11 | 9 import unittest |
| 11 | 10 from tvii import logistic_regression |
| 11 | 11 |
| 28 | 12 |
| 11 | 13 class LogisticRegresionTests(unittest.TestCase): |
| 22 | 14 |
| 31 | 15     def compare_arrays(self, a, b): |
| 31 | 16         assert a.shape == b.shape |
| 31 | 17         for x, y in zip(a.flatten(), |
| 31 | 18                         b.flatten()): |
| 31 | 19             self.assertAlmostEqual(x, y) |
| 31 | 20 |
| 31 | 21 |
| 22 | 22     def test_cost(self): |
| 22 | 23         """test cost function""" |
| 22 | 24 |
| 22 | 25         w, b, X, Y = (np.array([[1],[2]]), |
| 22 | 26                       2, |
| 22 | 27                       np.array([[1,2],[3,4]]), |
| 22 | 28                       np.array([[1,0]])) |
| 22 | 29 |
| 22 | 30         expected_cost = 6.000064773192205 |
| 22 | 31         cost = logistic_regression.cost_function(w, b, X, Y) |
| 23 | 32         assert abs(cost - expected_cost) < 1e-6 |
| 11 | 33 |
| 28 | 34     def test_propagate(self): |
| 28 | 35         """test canned logistic regression example""" |
| 28 | 36 |
| 28 | 37         # sample variables |
| 28 | 38         w = np.array([[1],[2]]) |
| 28 | 39         b = 2 |
| 28 | 40         X = np.array([[1,2],[3,4]]) |
| 28 | 41         Y = np.array([[1,0]]) |
| 28 | 42 |
| 28 | 43         # calculate gradient and cost |
| 28 | 44         grads, cost = logistic_regression.propagate(w, b, X, Y) |
| 28 | 45 |
| 28 | 46         # compare to expected, |
| 29 | 47         dw_expected = np.array([[ 0.99993216], [ 1.99980262]]) |
| 28 | 48         db_expected = 0.499935230625 |
| 28 | 49         cost_expected = 6.000064773192205 |
| 28 | 50 |
| 29 | 51         self.assertAlmostEqual(cost_expected, cost) |
| 29 | 52         self.assertAlmostEqual(grads['db'], db_expected) |
| 29 | 53         assert grads['dw'].shape == dw_expected.shape |
| 29 | 54         for a, b in zip(grads['dw'].flatten(), |
| 29 | 55                         dw_expected.flatten()): |
| 29 | 56             self.assertAlmostEqual(a, b) |
| 29 | 57 |
| 31 | 58     def test_optimize(self): |
| 31 | 59         """test gradient descent method""" |
| 31 | 60 |
| 31 | 61         # test examples |
| 32 | 62         w, b, X, Y = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]), np.array([[1,0]])) |
| 31 | 63 |
| 31 | 64         params, grads, costs = logistic_regression.optimize(w, b, X, Y, num_iterations= 100, learning_rate = 0.009, print_cost = False) |
| 31 | 65 |
| 31 | 66         # expected output |
| 31 | 67         w_expected = np.array([[0.1124579 ], |
| 31 | 68                                [0.23106775]]) |
| 31 | 69         dw_expected = np.array([[ 0.90158428], |
| 31 | 70                                 [ 1.76250842]]) |
| 31 | 71         b_expected = 1.55930492484 |
| 31 | 72         db_expected = 0.430462071679 |
| 31 | 73 |
| 31 | 74         # compare output |
| 31 | 75         self.assertAlmostEqual(params['b'], b_expected) |
| 31 | 76         self.assertAlmostEqual(grads['db'], db_expected) |
| 31 | 77         self.compare_arrays(w_expected, params['w']) |
| 31 | 78         self.compare_arrays(dw_expected, grads['dw']) |
| 31 | 79 |
| 32 | 80     def test_predict(self): |
| 32 | 81 |
| 32 | 82         w, b, X, Y = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]), np.array([[1,0]])) |
| 32 | 83 |
| 32 | 84         predictions = logistic_regression.predict(w, b, X) |
| 32 | 85 |
| 32 | 86         assert predictions[0][0] == 1 |
| 32 | 87         assert predictions[0][1] == 1 |
| 28 | 88 |
| 11 | 89 if __name__ == '__main__': |
| 11 | 90     unittest.main() |
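
For reference, the expected values asserted in `test_cost` and `test_propagate` (cost 6.000064773192205, dw ≈ [[0.99993216], [1.99980262]], db ≈ 0.499935230625) are consistent with the standard logistic-regression cost and gradients. The sketch below is illustrative only: it assumes that conventional formulation and is not the actual `tvii.logistic_regression` implementation, whose `cost_function`, `propagate`, `optimize`, and `predict` signatures are only inferred from these tests.

```python
import numpy as np

def sigmoid(z):
    """Elementwise logistic function."""
    return 1.0 / (1.0 + np.exp(-z))

def propagate(w, b, X, Y):
    """Hypothetical forward/backward pass matching the values asserted above.

    w: weights, shape (n, 1); b: scalar bias
    X: inputs, shape (n, m);  Y: labels, shape (1, m)
    """
    m = X.shape[1]
    A = sigmoid(np.dot(w.T, X) + b)        # activations, shape (1, m)
    # cross-entropy cost averaged over the m examples
    cost = -np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A)) / m
    dw = np.dot(X, (A - Y).T) / m          # gradient of cost w.r.t. w
    db = np.sum(A - Y) / m                 # gradient of cost w.r.t. b
    return {'dw': dw, 'db': db}, cost

if __name__ == '__main__':
    w, b = np.array([[1], [2]]), 2
    X, Y = np.array([[1, 2], [3, 4]]), np.array([[1, 0]])
    grads, cost = propagate(w, b, X, Y)
    print(cost)           # ~6.000064773192205
    print(grads['db'])    # ~0.499935230625
    print(grads['dw'])    # ~[[0.99993216], [1.99980262]]
```

Under this formulation, `optimize` would simply repeat `propagate` and apply `w -= learning_rate * dw`, `b -= learning_rate * db` for `num_iterations` steps, which is what `test_optimize` exercises. The `if __name__ == '__main__'` guard at the bottom of the test file means it can be run directly with `python tests/test_logistic_regression.py` as well as through unittest discovery.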
