Commit
working state
lmarzora committed May 6, 2017
1 parent 364aefc commit 52c648e
Showing 3 changed files with 48 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
.octave-workspace
4 changes: 4 additions & 0 deletions activation_tanh.m
@@ -0,0 +1,4 @@
% Returns the activation f(x) = tanh(b*x) and its derivative
% f'(x) = b*(1 - tanh(b*x).^2) as a pair of function handles.
function [f, fder] = activation_tanh(b)
  f = @(x) tanh(b*x);
  fder = @(x) b*(1 - f(x).^2);
end
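
A quick check of the factory (a hypothetical usage sketch, not part of the commit); the values shown are what Octave prints for beta = 0.5:

[f, fder] = activation_tanh(0.5);  % beta = 0.5 is an arbitrary example value
f(1)     % tanh(0.5)             ~= 0.4621
fder(1)  % 0.5*(1 - tanh(0.5)^2) ~= 0.3932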
43 changes: 43 additions & 0 deletions multilayer_perceptron.m
@@ -0,0 +1,43 @@
2; % dummy statement: marks this file as a script so it may define several functions

% Applies one layer: a single matrix-vector product followed by the activation.
function output = get_output(entry, weights, activation_func)
  output = activation_func(weights * entry);
end
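% Hypothetical usage (not part of the commit): for a weight matrix W with one
% row per neuron and a column-vector entry,
%   out = get_output(entry, W, @tanh);
% evaluates the whole layer at once. Note that the training routine below
% inlines this computation rather than calling the helper.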

function [weights, output] = multilayer_perceptron_learn(entries, expected_output, neurons_per_layer, activation_func, activation_der, learning_factor = 0.5, max_iterations = 1000, tolerance = 1e-5)
  % entries holds one training pattern per column; expected_output holds the
  % target for each pattern (the network is assumed to end in a single output
  % neuron). neurons_per_layer lists the layer sizes, input layer included.
  num_patterns = columns(entries);

  % One weight matrix per layer transition, with an extra column for the
  % bias input (fixed at -1), initialised uniformly in [-0.5, 0.5).
  M = length(neurons_per_layer) - 1;
  for m = 1:M
    weights{m} = rand(neurons_per_layer(m+1), neurons_per_layer(m)+1) - 0.5;
    layer_entry{m} = [-1, zeros(1, neurons_per_layer(m))];
  end

  for iteration = 1:max_iterations
    % Online training: visit the patterns in a random order each epoch.
    for index = randperm(num_patterns)
      % Forward pass: propagate the pattern through the hidden layers.
      layer_entry{1}(2:end) = entries(:, index);
      for m = 2:M
        layer_entry{m}(2:end) = activation_func(weights{m-1} * layer_entry{m-1}')';
      end
      output(index) = activation_func(weights{M} * layer_entry{M}');

      % Backward pass: output delta first, then the hidden-layer deltas
      % (the bias column is excluded when distributing the error, and the
      % products are element-wise per neuron).
      d{M} = activation_der(weights{M} * layer_entry{M}') * (expected_output(index) - output(index));
      for i = M-1:-1:1
        d{i} = activation_der(weights{i} * layer_entry{i}') .* (weights{i+1}(:, 2:end)' * d{i+1});
      end

      % Gradient step on every weight matrix.
      for i = 1:M
        weights{i} = weights{i} + learning_factor * d{i} * layer_entry{i};
      end
    end

    % Stop once the summed absolute error over all patterns is small enough.
    if sum(abs(expected_output - output)) <= tolerance
      return
    end
  end
end
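
A minimal end-to-end sketch (hypothetical usage, not part of the commit): training the network on XOR with the tanh activation above. The layer sizes, beta value, and targets are arbitrary example choices; with -1/1 targets the tolerance is never met exactly, so training runs for the default max_iterations:

entries = [0 0 1 1; 0 1 0 1];   % one XOR input pattern per column
expected_output = [-1 1 1 -1];  % XOR targets in tanh's (-1, 1) range
[f, fder] = activation_tanh(1); % beta = 1
[weights, output] = multilayer_perceptron_learn(entries, expected_output, [2 3 1], f, fder);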
