Skip to content

Implementation: Multi-Class Backpropagation #486

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Apr 12, 2017
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update learning.py
  • Loading branch information
antmarakis authored Apr 7, 2017
commit c3bc6eed2a3326ee770ffb54f120dc66e61f0db9
138 changes: 88 additions & 50 deletions learning.py
Original file line number Diff line number Diff line change
Expand Up @@ -465,7 +465,7 @@ def NeuralNetLearner(dataset, hidden_layer_sizes=[3],
"""

i_units = len(dataset.inputs)
o_units = 1 # As of now, dataset.target gives only one index.
o_units = len(dataset.values[dataset.target])

# construct a network
raw_net = network(i_units, hidden_layer_sizes, o_units)
Expand All @@ -490,49 +490,12 @@ def predict(example):

# Hypothesis
o_nodes = learned_net[-1]
pred = [o_nodes[i].value for i in range(o_units)]
return 1 if pred[0] >= 0.5 else 0
prediction = find_max_node(o_nodes)
return prediction

return predict


class NNUnit:
    """Single Unit of Multiple Layer Neural Network
    inputs: Incoming connections
    weights: Weights to incoming connections
    """

    def __init__(self, weights=None, inputs=None):
        # NOTE(review): both constructor arguments are currently ignored —
        # connections always start empty and are filled in by network().
        self.weights = []
        self.inputs = []
        # Activation value of this unit; None until the net is activated.
        self.value = None
        # Nonlinearity applied to the weighted input sum
        # (sigmoid is defined elsewhere in this file).
        self.activation = sigmoid


def network(input_units, hidden_layer_sizes, output_units):
    """Create Directed Acyclic Network of given number layers.
    hidden_layers_sizes : List number of neuron units in each hidden layer
    excluding input and output layers
    """
    # An empty hidden_layer_sizes means a single-layer perceptron:
    # inputs wired directly to outputs.
    if hidden_layer_sizes:
        sizes = [input_units] + hidden_layer_sizes + [output_units]
    else:
        sizes = [input_units, output_units]

    layers = [[NNUnit() for _ in range(size)] for size in sizes]

    # Fully connect each layer to the one before it; weights start at 0
    # (the learner is expected to (re)initialize them before training).
    for prev_layer, layer in zip(layers, layers[1:]):
        for unit in layer:
            for source in prev_layer:
                unit.inputs.append(source)
                unit.weights.append(0)

    return layers


def BackPropagationLearner(dataset, net, learning_rate, epochs):
"""[Figure 18.23] The back-propagation algorithm for multilayer network"""
# Initialise weights
Expand All @@ -547,17 +510,21 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
Changing dataset class will have effect on all the learners.
Will be taken care of later
'''
idx_t = [dataset.target]
idx_i = dataset.inputs
n_layers = len(net)
o_nodes = net[-1]
i_nodes = net[0]
o_units = len(o_nodes)
idx_t = dataset.target
idx_i = dataset.inputs
n_layers = len(net)

inputs, targets = init_examples(examples, idx_i, idx_t, o_units)

for epoch in range(epochs):
# Iterate over each example
for e in examples:
i_val = [e[i] for i in idx_i]
t_val = [e[i] for i in idx_t]
for e in range(len(examples)):
i_val = inputs[e]
t_val = targets[e]

# Activate input layer
for v, n in zip(i_val, i_nodes):
n.value = v
Expand All @@ -573,7 +540,6 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
delta = [[] for i in range(n_layers)]

# Compute outer layer delta
o_units = len(o_nodes)
err = [t_val[i] - o_nodes[i].value
for i in range(o_units)]
delta[-1] = [(o_nodes[i].value) * (1 - o_nodes[i].value) *
Expand Down Expand Up @@ -609,7 +575,7 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
def PerceptronLearner(dataset, learning_rate=0.01, epochs=100):
"""Logistic Regression, NO hidden layer"""
i_units = len(dataset.inputs)
o_units = 1 # As of now, dataset.target gives only one index.
o_units = len(dataset.values[dataset.target])
hidden_layer_sizes = []
raw_net = network(i_units, hidden_layer_sizes, o_units)
learned_net = BackPropagationLearner(dataset, raw_net, learning_rate, epochs)
Expand All @@ -631,10 +597,82 @@ def predict(example):

# Hypothesis
o_nodes = learned_net[-1]
pred = [o_nodes[i].value for i in range(o_units)]
return 1 if pred[0] >= 0.5 else 0
prediction = find_max_node(o_nodes)
return prediction

return predict


class NNUnit:
    """Single unit of a multilayer neural network.

    inputs: list of upstream NNUnit objects feeding this unit
    weights: weights of the corresponding incoming connections
    value: activation value of this unit (None until the net is activated)
    activation: nonlinearity applied to the weighted input sum
    """

    def __init__(self, weights=None, inputs=None):
        # Honor caller-supplied connections; previously both arguments were
        # silently ignored and always reset to empty lists. Defaulting to
        # fresh lists here (not mutable defaults) keeps instances independent.
        self.weights = weights if weights is not None else []
        self.inputs = inputs if inputs is not None else []
        self.value = None
        self.activation = sigmoid


def network(input_units, hidden_layer_sizes, output_units):
    """Create Directed Acyclic Network of given number layers.
    hidden_layers_sizes : List number of neuron units in each hidden layer
    excluding input and output layers

    Returns a list of layers, each layer a list of NNUnit objects, with
    every unit fully connected to all units of the previous layer.
    """
    # Check for PerceptronLearner: no hidden layers means inputs are
    # wired directly to outputs.
    if hidden_layer_sizes:
        layers_sizes = [input_units] + hidden_layer_sizes + [output_units]
    else:
        layers_sizes = [input_units] + [output_units]

    net = [[NNUnit() for n in range(size)]
           for size in layers_sizes]
    n_layers = len(net)

    # Make Connection: fully connect consecutive layers. Weights start at 0;
    # the learner is expected to (re)initialize them before training.
    for i in range(1, n_layers):
        for n in net[i]:
            for k in net[i-1]:
                n.inputs.append(k)
                n.weights.append(0)
    return net


def init_examples(examples, idx_i, idx_t, o_units):
    """Preprocess dataset examples into input and target mappings.

    examples: list of examples, each an indexable row of attribute values
    idx_i: indices of the input attributes within each example
    idx_t: index of the target attribute within each example
    o_units: number of output units; when > 1 targets are one-hot encoded
             (the target value is assumed to be a valid class index)

    Returns (inputs, targets): dicts keyed by example position, mapping to
    the example's input-value list and its target-value list respectively.
    """
    inputs = {}
    targets = {}

    # Use enumerate and a distinct comprehension variable: the original
    # code reused the loop index inside the comprehension, which only
    # worked by virtue of Python 3's comprehension scoping.
    for i, e in enumerate(examples):
        # Input values of e
        inputs[i] = [e[j] for j in idx_i]

        if o_units > 1:
            # One-Hot representation of e's target class
            t = [0] * o_units
            t[e[idx_t]] = 1
            targets[i] = t
        else:
            # Single-output case: raw target value
            targets[i] = [e[idx_t]]

    return inputs, targets


def find_max_node(nodes):
    """Return the index of the node with the highest activation value.

    nodes: list of units, each exposing a comparable .value attribute.
    Returns -1 for an empty list. Ties go to the earliest index, matching
    the original strict-greater-than scan.

    Fixes a latent bug: the previous implementation initialized its running
    maximum to the magic sentinel -10000, so it returned -1 (not a valid
    index) whenever every node value was <= -10000.
    """
    if not nodes:
        return -1
    return max(range(len(nodes)), key=lambda i: nodes[i].value)

# ______________________________________________________________________________


Expand Down
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy