-from learning import parse_csv, weighted_mode, weighted_replicate, DataSet, \
-                     PluralityLearner, NaiveBayesLearner, NearestNeighborLearner, \
-                     NeuralNetLearner, PerceptronLearner, DecisionTreeLearner, \
-                     euclidean_distance, grade_learner, err_ratio, random_weights
+
+import pytest
+import math
 from utils import DataFile
+from learning import (parse_csv, weighted_mode, weighted_replicate, DataSet,
+                      PluralityLearner, NaiveBayesLearner, NearestNeighborLearner,
+                      NeuralNetLearner, PerceptronLearner, DecisionTreeLearner,
+                      euclidean_distance, grade_learner, err_ratio, random_weights,
+                      rms_error, manhattan_distance, mean_boolean_error, mean_error)
 
 
 
@@ -74,16 +76,43 @@ def test_naive_bayes():
 
 def test_k_nearest_neighbors():
     iris = DataSet(name="iris")
-
     kNN = NearestNeighborLearner(iris,k=3)
+    assert kNN([5,3,1,0.1]) == "setosa"
     assert kNN([5, 3, 1, 0.1]) == "setosa"
     assert kNN([6, 5, 3, 1.5]) == "versicolor"
     assert kNN([7.5, 4, 6, 2]) == "virginica"
 
+def test_rms_error():
+    assert rms_error([2,2], [2,2]) == 0
+    assert rms_error((0,0), (0,1)) == math.sqrt(0.5)
+    assert rms_error((1,0), (0,1)) == 1
+    assert rms_error((0,0), (0,-1)) == math.sqrt(0.5)
+    assert rms_error((0,0.5), (0,-0.5)) == math.sqrt(0.5)
+
+def test_manhattan_distance():
+    assert manhattan_distance([2,2], [2,2]) == 0
+    assert manhattan_distance([0,0], [0,1]) == 1
+    assert manhattan_distance([1,0], [0,1]) == 2
+    assert manhattan_distance([0,0], [0,-1]) == 1
+    assert manhattan_distance([0,0.5], [0,-0.5]) == 1
+
+def test_mean_boolean_error():
+    assert mean_boolean_error([1,1], [0,0]) == 1
+    assert mean_boolean_error([0,1], [1,0]) == 1
+    assert mean_boolean_error([1,1], [0,1]) == 0.5
+    assert mean_boolean_error([0,0], [0,0]) == 0
+    assert mean_boolean_error([1,1], [1,1]) == 0
+
+def test_mean_error():
+    assert mean_error([2,2], [2,2]) == 0
+    assert mean_error([0,0], [0,1]) == 0.5
+    assert mean_error([1,0], [0,1]) == 1
+    assert mean_error([0,0], [0,-1]) == 0.5
+    assert mean_error([0,0.5], [0,-0.5]) == 0.5
+
 
 def test_decision_tree_learner():
     iris = DataSet(name="iris")
-
     dTL = DecisionTreeLearner(iris)
     assert dTL([5, 3, 1, 0.1]) == "setosa"
     assert dTL([6, 5, 3, 1.5]) == "versicolor"
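Note: the asserted values in the new metric tests rely on rms_error, manhattan_distance, mean_boolean_error and mean_error as defined in learning.py. As a reference only, a minimal sketch of the definitions the tests assume (not the library's actual code):

    import math

    def rms_error(X, Y):
        # Square root of the mean squared difference between paired values.
        return math.sqrt(sum((x - y) ** 2 for x, y in zip(X, Y)) / len(X))

    def manhattan_distance(X, Y):
        # Sum of absolute coordinate-wise differences.
        return sum(abs(x - y) for x, y in zip(X, Y))

    def mean_boolean_error(X, Y):
        # Fraction of positions where the two sequences disagree.
        return sum(x != y for x, y in zip(X, Y)) / len(X)

    def mean_error(X, Y):
        # Mean absolute difference between paired values.
        return sum(abs(x - y) for x, y in zip(X, Y)) / len(X)

Under these definitions every assertion above holds exactly, e.g. rms_error((1,0), (0,1)) == math.sqrt((1 + 1) / 2) == 1.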
@@ -92,36 +121,30 @@ def test_decision_tree_learner():
 
 def test_neural_network_learner():
     iris = DataSet(name="iris")
-
     classes = ["setosa","versicolor","virginica"]
     iris.classes_to_numbers(classes)
-
     nNL = NeuralNetLearner(iris, [5], 0.15, 75)
     tests = [([5, 3, 1, 0.1], 0),
              ([5, 3.5, 1, 0], 0),
              ([6, 3, 4, 1.1], 1),
              ([6, 2, 3.5, 1], 1),
              ([7.5, 4, 6, 2], 2),
              ([7, 3, 6, 2.5], 2)]
-
     assert grade_learner(nNL, tests) >= 2/3
     assert err_ratio(nNL, iris) < 0.25
 
 
 def test_perceptron():
     iris = DataSet(name="iris")
     iris.classes_to_numbers()
-
     classes_number = len(iris.values[iris.target])
-
     perceptron = PerceptronLearner(iris)
     tests = [([5, 3, 1, 0.1], 0),
              ([5, 3.5, 1, 0], 0),
              ([6, 3, 4, 1.1], 1),
              ([6, 2, 3.5, 1], 1),
              ([7.5, 4, 6, 2], 2),
              ([7, 3, 6, 2.5], 2)]
-
     assert grade_learner(perceptron, tests) > 1/2
     assert err_ratio(perceptron, iris) < 0.4
 
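The neural-network and perceptron thresholds above use grade_learner and err_ratio from learning.py. A hedged sketch of the semantics these assertions assume (not the library's actual implementation): grade_learner returns the fraction of (example, expected_class) pairs the trained learner predicts correctly, and err_ratio returns the fraction of a dataset's examples it misclassifies.

    def grade_learner(predict, tests):
        # Fraction of (example, expected) pairs the learner gets right.
        return sum(predict(X) == expected for X, expected in tests) / len(tests)

    def err_ratio(predict, dataset):
        # Fraction of the dataset's examples the learner misclassifies.
        wrong = sum(predict(dataset.sanitize(example)) != example[dataset.target]
                    for example in dataset.examples)
        return wrong / len(dataset.examples)

Under that reading, grade_learner(nNL, tests) >= 2/3 requires at least 4 of the 6 hand-picked iris examples to be classified correctly, while err_ratio bounds the error over the whole dataset.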
@@ -130,12 +153,8 @@ def test_random_weights():
     min_value = -0.5
     max_value = 0.5
     num_weights = 10
-
     test_weights = random_weights(min_value, max_value, num_weights)
-
     assert len(test_weights) == num_weights
-
     for weight in test_weights:
        assert weight >= min_value and weight <= max_value
-
-
+
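random_weights is likewise taken from learning.py; the length and bounds checks above are consistent with an implementation along these lines (an assumed sketch, not the actual code):

    import random

    def random_weights(min_value, max_value, num_weights):
        # num_weights values drawn uniformly from [min_value, max_value].
        return [random.uniform(min_value, max_value) for _ in range(num_weights)]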