From c04212342ff267610ecc820bafb10df8b2c78ca9 Mon Sep 17 00:00:00 2001 From: Antonis Maronikolakis Date: Fri, 7 Apr 2017 19:30:38 +0300 Subject: [PATCH 1/5] Update test_learning.py --- tests/test_learning.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/tests/test_learning.py b/tests/test_learning.py index 4f618f7c1..f8a801f5a 100644 --- a/tests/test_learning.py +++ b/tests/test_learning.py @@ -52,23 +52,33 @@ def test_decision_tree_learner(): def test_neural_network_learner(): iris = DataSet(name="iris") - iris.remove_examples("virginica") - + classes = ["setosa","versicolor","virginica"] - iris.classes_to_numbers() + iris.classes_to_numbers(classes) + + nNL = NeuralNetLearner(iris, [5], 0.15, 75) + pred1 = nNL([5,3,1,0.1]) + pred2 = nNL([6,3,3,1.5]) + pred3 = nNL([7.5,4,6,2]) - nNL = NeuralNetLearner(iris) - # NeuralNetLearner might be wrong. Just check if prediction is in range. - assert nNL([5,3,1,0.1]) in range(len(classes)) + # NeuralNetLearner might be wrong. If it is, check if prediction is in range. + assert pred1 == 0 or pred1 in range(len(classes)) + assert pred2 == 1 or pred2 in range(len(classes)) + assert pred3 == 2 or pred3 in range(len(classes)) def test_perceptron(): iris = DataSet(name="iris") - iris.remove_examples("virginica") - - classes = ["setosa","versicolor","virginica"] iris.classes_to_numbers() + classes_number = len(iris.values[iris.target]) + perceptron = PerceptronLearner(iris) - # PerceptronLearner might be wrong. Just check if prediction is in range. - assert perceptron([5,3,1,0.1]) in range(len(classes)) + pred1 = perceptron([5,3,1,0.1]) + pred2 = perceptron([6,3,4,1]) + pred3 = perceptron([7.5,4,6,2]) + + # PerceptronLearner might be wrong. If it is, check if prediction is in range. 
+ assert pred1 == 0 or pred1 in range(classes_number) + assert pred2 == 1 or pred2 in range(classes_number) + assert pred3 == 2 or pred3 in range(classes_number) From c3bc6eed2a3326ee770ffb54f120dc66e61f0db9 Mon Sep 17 00:00:00 2001 From: Antonis Maronikolakis Date: Fri, 7 Apr 2017 19:31:40 +0300 Subject: [PATCH 2/5] Update learning.py --- learning.py | 138 +++++++++++++++++++++++++++++++++------------------- 1 file changed, 88 insertions(+), 50 deletions(-) diff --git a/learning.py b/learning.py index ec685131d..a34c8edb0 100644 --- a/learning.py +++ b/learning.py @@ -465,7 +465,7 @@ def NeuralNetLearner(dataset, hidden_layer_sizes=[3], """ i_units = len(dataset.inputs) - o_units = 1 # As of now, dataset.target gives only one index. + o_units = len(dataset.values[dataset.target]) # construct a network raw_net = network(i_units, hidden_layer_sizes, o_units) @@ -490,49 +490,12 @@ def predict(example): # Hypothesis o_nodes = learned_net[-1] - pred = [o_nodes[i].value for i in range(o_units)] - return 1 if pred[0] >= 0.5 else 0 + prediction = find_max_node(o_nodes) + return prediction return predict -class NNUnit: - """Single Unit of Multiple Layer Neural Network - inputs: Incoming connections - weights: Weights to incoming connections - """ - - def __init__(self, weights=None, inputs=None): - self.weights = [] - self.inputs = [] - self.value = None - self.activation = sigmoid - - -def network(input_units, hidden_layer_sizes, output_units): - """Create Directed Acyclic Network of given number layers. 
- hidden_layers_sizes : List number of neuron units in each hidden layer - excluding input and output layers - """ - # Check for PerceptronLearner - if hidden_layer_sizes: - layers_sizes = [input_units] + hidden_layer_sizes + [output_units] - else: - layers_sizes = [input_units] + [output_units] - - net = [[NNUnit() for n in range(size)] - for size in layers_sizes] - n_layers = len(net) - - # Make Connection - for i in range(1, n_layers): - for n in net[i]: - for k in net[i-1]: - n.inputs.append(k) - n.weights.append(0) - return net - - def BackPropagationLearner(dataset, net, learning_rate, epochs): """[Figure 18.23] The back-propagation algorithm for multilayer network""" # Initialise weights @@ -547,17 +510,21 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): Changing dataset class will have effect on all the learners. Will be taken care of later ''' - idx_t = [dataset.target] - idx_i = dataset.inputs - n_layers = len(net) o_nodes = net[-1] i_nodes = net[0] + o_units = len(o_nodes) + idx_t = dataset.target + idx_i = dataset.inputs + n_layers = len(net) + + inputs, targets = init_examples(examples, idx_i, idx_t, o_units) for epoch in range(epochs): # Iterate over each example - for e in examples: - i_val = [e[i] for i in idx_i] - t_val = [e[i] for i in idx_t] + for e in range(len(examples)): + i_val = inputs[e] + t_val = targets[e] + # Activate input layer for v, n in zip(i_val, i_nodes): n.value = v @@ -573,7 +540,6 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): delta = [[] for i in range(n_layers)] # Compute outer layer delta - o_units = len(o_nodes) err = [t_val[i] - o_nodes[i].value for i in range(o_units)] delta[-1] = [(o_nodes[i].value) * (1 - o_nodes[i].value) * @@ -609,7 +575,7 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs): def PerceptronLearner(dataset, learning_rate=0.01, epochs=100): """Logistic Regression, NO hidden layer""" i_units = len(dataset.inputs) - o_units = 1 # As of now, 
dataset.target gives only one index. + o_units = len(dataset.values[dataset.target]) hidden_layer_sizes = [] raw_net = network(i_units, hidden_layer_sizes, o_units) learned_net = BackPropagationLearner(dataset, raw_net, learning_rate, epochs) @@ -631,10 +597,82 @@ def predict(example): # Hypothesis o_nodes = learned_net[-1] - pred = [o_nodes[i].value for i in range(o_units)] - return 1 if pred[0] >= 0.5 else 0 + prediction = find_max_node(o_nodes) + return prediction return predict + + +class NNUnit: + """Single Unit of Multiple Layer Neural Network + inputs: Incoming connections + weights: Weights to incoming connections + """ + + def __init__(self, weights=None, inputs=None): + self.weights = [] + self.inputs = [] + self.value = None + self.activation = sigmoid + + +def network(input_units, hidden_layer_sizes, output_units): + """Create Directed Acyclic Network of given number layers. + hidden_layers_sizes : List number of neuron units in each hidden layer + excluding input and output layers + """ + # Check for PerceptronLearner + if hidden_layer_sizes: + layers_sizes = [input_units] + hidden_layer_sizes + [output_units] + else: + layers_sizes = [input_units] + [output_units] + + net = [[NNUnit() for n in range(size)] + for size in layers_sizes] + n_layers = len(net) + + # Make Connection + for i in range(1, n_layers): + for n in net[i]: + for k in net[i-1]: + n.inputs.append(k) + n.weights.append(0) + return net + + +def init_examples(examples, idx_i, idx_t, o_units): + inputs = {} + targets = {} + + for i in range(len(examples)): + e = examples[i] + # Input values of e + inputs[i] = [e[i] for i in idx_i] + + if o_units > 1: + # One-Hot representation of e's target + t = [0 for i in range(o_units)] + t[e[idx_t]] = 1 + targets[i] = t + else: + # Target value of e + targets[i] = [e[idx_t]] + + return inputs, targets + + +def find_max_node(nodes): + index = -1 + max_score = -10000 + # Find max hypothesis + for i in range(len(nodes)): + curr_score = nodes[i].value + 
if curr_score > max_score: + index = i + max_score = curr_score + + return index + # ______________________________________________________________________________ From b161b8b0a2fa48177b83bb925fb0ce513d6f96df Mon Sep 17 00:00:00 2001 From: Antonis Maronikolakis Date: Sat, 8 Apr 2017 11:52:40 +0300 Subject: [PATCH 3/5] set max_score to -1 (for now) --- learning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/learning.py b/learning.py index a34c8edb0..c51523a58 100644 --- a/learning.py +++ b/learning.py @@ -663,7 +663,7 @@ def init_examples(examples, idx_i, idx_t, o_units): def find_max_node(nodes): index = -1 - max_score = -10000 + max_score = -1 # Find max hypothesis for i in range(len(nodes)): curr_score = nodes[i].value From a13b9e97fb060f9d70483982f98f1d2c05e37a6c Mon Sep 17 00:00:00 2001 From: Antonis Maronikolakis Date: Sat, 8 Apr 2017 11:54:14 +0300 Subject: [PATCH 4/5] Update learning.py --- learning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/learning.py b/learning.py index c51523a58..1321994ee 100644 --- a/learning.py +++ b/learning.py @@ -663,7 +663,7 @@ def init_examples(examples, idx_i, idx_t, o_units): def find_max_node(nodes): index = -1 - max_score = -1 + max_score = 0 # Find max hypothesis for i in range(len(nodes)): curr_score = nodes[i].value From b6be1c73f330d7f0a9662e90e63532cbb5468edc Mon Sep 17 00:00:00 2001 From: Antonis Maronikolakis Date: Sat, 8 Apr 2017 17:04:28 +0300 Subject: [PATCH 5/5] Make find_max more pythonic --- learning.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/learning.py b/learning.py index 1321994ee..6c733e25a 100644 --- a/learning.py +++ b/learning.py @@ -662,16 +662,7 @@ def init_examples(examples, idx_i, idx_t, o_units): def find_max_node(nodes): - index = -1 - max_score = 0 - # Find max hypothesis - for i in range(len(nodes)): - curr_score = nodes[i].value - if curr_score > max_score: - index = i - max_score = curr_score - - return index + 
 return nodes.index(argmax(nodes, key=lambda node: node.value)) # ______________________________________________________________________________