diff --git a/learning.py b/learning.py index ec685131d..6c733e25a 100644 --- a/learning.py +++ b/learning.py @@ -465,7 +465,7 @@ def NeuralNetLearner(dataset, hidden_layer_sizes=[3], """ i_units = len(dataset.inputs) - o_units = 1 # As of now, dataset.target gives only one index. + o_units = len(dataset.values[dataset.target]) # construct a network raw_net = network(i_units, hidden_layer_sizes, o_units) @@ -490,49 +490,12 @@ def predict(example): # Hypothesis o_nodes = learned_net[-1] - pred = [o_nodes[i].value for i in range(o_units)] - return 1 if pred[0] >= 0.5 else 0 + prediction = find_max_node(o_nodes) + return prediction return predict -class NNUnit: - """Single Unit of Multiple Layer Neural Network - inputs: Incoming connections - weights: Weights to incoming connections - """ - - def __init__(self, weights=None, inputs=None): - self.weights = [] - self.inputs = [] - self.value = None - self.activation = sigmoid - - -def network(input_units, hidden_layer_sizes, output_units): - """Create Directed Acyclic Network of given number layers. - hidden_layers_sizes : List number of neuron units in each hidden layer - excluding input and output layers - """ - # Check for PerceptronLearner - if hidden_layer_sizes: - layers_sizes = [input_units] + hidden_layer_sizes + [output_units] - else: - layers_sizes = [input_units] + [output_units] - - net = [[NNUnit() for n in range(size)] - for size in layers_sizes] - n_layers = len(net) - - # Make Connection - for i in range(1, n_layers): - for n in net[i]: - for k in net[i-1]: - n.inputs.append(k) - n.weights.append(0) - return net - - def BackPropagationLearner(dataset, net, learning_rate, epochs): """[Figure 18.23] The back-propagation algorithm for multilayer network""" # Initialise weights @@ -547,17 +510,21 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): Changing dataset class will have effect on all the learners. 
Will be taken care of later ''' - idx_t = [dataset.target] - idx_i = dataset.inputs - n_layers = len(net) o_nodes = net[-1] i_nodes = net[0] + o_units = len(o_nodes) + idx_t = dataset.target + idx_i = dataset.inputs + n_layers = len(net) + + inputs, targets = init_examples(examples, idx_i, idx_t, o_units) for epoch in range(epochs): # Iterate over each example - for e in examples: - i_val = [e[i] for i in idx_i] - t_val = [e[i] for i in idx_t] + for e in range(len(examples)): + i_val = inputs[e] + t_val = targets[e] + # Activate input layer for v, n in zip(i_val, i_nodes): n.value = v @@ -573,7 +540,6 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): delta = [[] for i in range(n_layers)] # Compute outer layer delta - o_units = len(o_nodes) err = [t_val[i] - o_nodes[i].value for i in range(o_units)] delta[-1] = [(o_nodes[i].value) * (1 - o_nodes[i].value) * @@ -609,7 +575,7 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): def PerceptronLearner(dataset, learning_rate=0.01, epochs=100): """Logistic Regression, NO hidden layer""" i_units = len(dataset.inputs) - o_units = 1 # As of now, dataset.target gives only one index. 
+ o_units = len(dataset.values[dataset.target]) hidden_layer_sizes = [] raw_net = network(i_units, hidden_layer_sizes, o_units) learned_net = BackPropagationLearner(dataset, raw_net, learning_rate, epochs) @@ -631,10 +597,73 @@ def predict(example): # Hypothesis o_nodes = learned_net[-1] - pred = [o_nodes[i].value for i in range(o_units)] - return 1 if pred[0] >= 0.5 else 0 + prediction = find_max_node(o_nodes) + return prediction return predict + + +class NNUnit: + """Single Unit of Multiple Layer Neural Network + inputs: Incoming connections + weights: Weights to incoming connections + """ + + def __init__(self, weights=None, inputs=None): + self.weights = [] + self.inputs = [] + self.value = None + self.activation = sigmoid + + +def network(input_units, hidden_layer_sizes, output_units): + """Create Directed Acyclic Network of given number layers. + hidden_layers_sizes : List number of neuron units in each hidden layer + excluding input and output layers + """ + # Check for PerceptronLearner + if hidden_layer_sizes: + layers_sizes = [input_units] + hidden_layer_sizes + [output_units] + else: + layers_sizes = [input_units] + [output_units] + + net = [[NNUnit() for n in range(size)] + for size in layers_sizes] + n_layers = len(net) + + # Make Connection + for i in range(1, n_layers): + for n in net[i]: + for k in net[i-1]: + n.inputs.append(k) + n.weights.append(0) + return net + + +def init_examples(examples, idx_i, idx_t, o_units): + inputs = {} + targets = {} + + for i in range(len(examples)): + e = examples[i] + # Input values of e + inputs[i] = [e[i] for i in idx_i] + + if o_units > 1: + # One-Hot representation of e's target + t = [0 for i in range(o_units)] + t[e[idx_t]] = 1 + targets[i] = t + else: + # Target value of e + targets[i] = [e[idx_t]] + + return inputs, targets + + +def find_max_node(nodes): + return nodes.index(argmax(nodes, key=lambda node: node.value)) + # ______________________________________________________________________________ diff 
--git a/tests/test_learning.py b/tests/test_learning.py index 4f618f7c1..f8a801f5a 100644 --- a/tests/test_learning.py +++ b/tests/test_learning.py @@ -52,23 +52,33 @@ def test_decision_tree_learner(): def test_neural_network_learner(): iris = DataSet(name="iris") - iris.remove_examples("virginica") - + classes = ["setosa","versicolor","virginica"] - iris.classes_to_numbers() + iris.classes_to_numbers(classes) + + nNL = NeuralNetLearner(iris, [5], 0.15, 75) + pred1 = nNL([5,3,1,0.1]) + pred2 = nNL([6,3,3,1.5]) + pred3 = nNL([7.5,4,6,2]) - nNL = NeuralNetLearner(iris) - # NeuralNetLearner might be wrong. Just check if prediction is in range. - assert nNL([5,3,1,0.1]) in range(len(classes)) + # NeuralNetLearner might be wrong. If it is, check if prediction is in range. + assert pred1 == 0 or pred1 in range(len(classes)) + assert pred2 == 1 or pred2 in range(len(classes)) + assert pred3 == 2 or pred3 in range(len(classes)) def test_perceptron(): iris = DataSet(name="iris") - iris.remove_examples("virginica") - - classes = ["setosa","versicolor","virginica"] iris.classes_to_numbers() + classes_number = len(iris.values[iris.target]) + perceptron = PerceptronLearner(iris) - # PerceptronLearner might be wrong. Just check if prediction is in range. - assert perceptron([5,3,1,0.1]) in range(len(classes)) + pred1 = perceptron([5,3,1,0.1]) + pred2 = perceptron([6,3,4,1]) + pred3 = perceptron([7.5,4,6,2]) + + # PerceptronLearner might be wrong. If it is, check if prediction is in range. + assert pred1 == 0 or pred1 in range(classes_number) + assert pred2 == 1 or pred2 in range(classes_number) + assert pred3 == 2 or pred3 in range(classes_number)