Commit f86c292: test and training

lsoncini committed May 17, 2017
1 parent a8d0d36 · commit f86c292
Showing 3 changed files with 53 additions and 13 deletions.
3 changes: 2 additions & 1 deletion configuration.m
@@ -4,11 +4,12 @@
data_file = 'terrain/terrain03.data';
starting_line = 2;
beta = 15;
activation = 'exp';
activation = 1;
net = [2, 45, 50, 1];
eta = 0.1;
max_iterations = 1e2;
cut_error = 1e-4;
train_percentage = 0.9;

% Optimizations
alpha = 0;
...
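The new train_percentage parameter drives the train/test split added to multilayer_perceptron.m below. A minimal sketch of its effect on a data set of n samples (the sample count here is hypothetical, not from the repo):

    n = 1000;                                    % hypothetical number of terrain rows
    train_percentage = 0.9;
    train_amount = floor(train_percentage * n);  % 900 rows go to training
    test_amount = n - train_amount;              % 100 rows are held out for testing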
14 changes: 11 additions & 3 deletions main.m
@@ -5,24 +5,32 @@
terrain = dlmread(data_file);
terrain = terrain(starting_line:end, :);

if activation == 'exp'
if activation == 1
[f,fder] = activation_exp(beta);
else
[f,fder] = activation_tanh(beta);
end

[weights, output, mse] = multilayer_perceptron_learn(terrain(:,1:2)', terrain(:,3)', net, f, fder, eta, max_iterations, cut_error, alpha, adaptative_eta);
[weights, output, mse, test_mse] = multilayer_perceptron_learn(terrain(:,1:2)', terrain(:,3)', train_percentage, net, f, fder, eta, max_iterations, cut_error, alpha, adaptative_eta);

if print_error
figure(1);
semilogy(mse, 'LineWidth', 2);
title("ECM");
xlabel("Épocas");
ylabel("ECM en escala logarítmica");
if length(test_mse) > 0
hold on;
semilogy(test_mse, 'LineWidth', 2);
total_error = mse * train_percentage + test_mse * (1-train_percentage);
semilogy(total_error, 'LineWidth', 2);
legend('Training MSE', 'Test MSE', 'Total MSE');
hold off;
end
end

if print_estimation
figure(2);
figure(3);
x = [-3:0.025:3];
y = x;
for i = 1:length(x)
...
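Switching the activation flag from the string 'exp' to a numeric code sidesteps a real Octave pitfall: == on character arrays compares element-wise and errors out when the operand lengths differ (e.g. 'tanh' == 'exp'). The helpers themselves live in activation_exp.m and activation_tanh.m, which this commit does not touch; a sketch of the shape they plausibly have, assuming the usual beta-scaled sigmoids:

    % hypothetical activation_exp.m -- assumed definition, not the repo's actual code
    function [f, fder] = activation_exp(beta)
      f    = @(h) 1 ./ (1 + exp(-2 * beta * h));  % logistic sigmoid with slope beta
      fder = @(h) 2 * beta * f(h) .* (1 - f(h));  % derivative expressed through f
    end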
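The combined error curve weights each MSE by the fraction of samples its set holds, which approximates the MSE over the full data set (only approximately, since floor() rounds the split). A worked check with hypothetical per-epoch values:

    train_mse = 0.020; test_mse = 0.050; train_percentage = 0.9;
    total = train_mse * train_percentage + test_mse * (1 - train_percentage)
    % total = 0.020 * 0.9 + 0.050 * 0.1 = 0.023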
49 changes: 40 additions & 9 deletions multilayer_perceptron.m
@@ -17,11 +17,35 @@
end
end

function [weights,output,error_per_iteration] = multilayer_perceptron_learn(entries, expected_output, neurons_per_layer, activation_func, activation_der,
function [weights, output, error_per_iteration, test_error] = multilayer_perceptron_learn(entries, expected_output, train_percentage, neurons_per_layer, activation_func, activation_der,
learning_factor=.5, max_iterations=1000, tolerance=1e-5, alpha=0, adaptative_eta=false, dbug=false)
%number of entries

full_entry = entries;
full_expected = expected_output;
n = length(entries(1,:));

train_amount = floor(train_percentage * n);
counter = 1;
test_error = [];

if train_percentage ~= 1
for index = randperm(n)
if counter < train_amount
train_entry(:, counter) = full_entry(:, index);
training_expected_output(counter) = full_expected(index);
else
test_entry(:, counter - train_amount + 1) = full_entry(:, index);
test_expected_output(counter - train_amount + 1) = full_expected(index);
end
counter = counter + 1;
end
entries = train_entry;
expected_output = training_expected_output;
end

n = length(entries(1,:));
eta = learning_factor;
a = 0;

%number of layers
m = 0;
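Note that counter starts at 1 and the test branch is taken once counter >= train_amount, so the training set actually receives train_amount - 1 samples; a <= comparison would give exactly train_amount. A vectorized sketch of the same shuffle-and-split, equivalent up to that boundary choice:

    perm      = randperm(n);                    % shuffle the sample indices once
    train_idx = perm(1:train_amount);           % first part trains the network
    test_idx  = perm(train_amount+1:end);       % the remainder is held out
    entries              = full_entry(:, train_idx);
    expected_output      = full_expected(train_idx);
    test_entry           = full_entry(:, test_idx);
    test_expected_output = full_expected(test_idx);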
@@ -32,8 +56,9 @@
%weights{m} = (2*(rand(neurons_per_layer(i), neurons_per_layer(i-1)+1) .- 0.5))./100;
weights{m} = (rand(neurons_per_layer(i), neurons_per_layer(i-1)+1) .- 0.5)./(neurons_per_layer(i-1));
layer_entry{m} = [-1, zeros(1, neurons_per_layer(i-1))];
h{m} = [-1 ,zeros(1, neurons_per_layer(i-1))];
h{m} = [-1, zeros(1, neurons_per_layer(i-1))];
end

%last layer
M = m;
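Each weight matrix carries an extra first column for the bias unit (the constant -1 prepended to every layer input), so weights{m} is (neurons in layer i) by (neurons in layer i-1, plus 1), uniform on [-0.5, 0.5] and scaled down by the fan-in. For the configured net = [2, 45, 50, 1] the initialization loop produces:

    % weights{1}: 45 x 3    (45 hidden neurons, 2 inputs + bias)
    % weights{2}: 50 x 46   (50 hidden neurons, 45 inputs + bias)
    % weights{3}:  1 x 51   (1 output neuron, 50 inputs + bias)
    W1 = (rand(45, 3) - 0.5) ./ 2;   % e.g. the first layer: uniform on [-0.25, 0.25]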

@@ -43,14 +68,17 @@
for index = randperm(n);
%get layers output
layer_entry{1}(2:end) = entries(:, index);

for m = 2:M
h{m-1} = weights{m-1} * layer_entry{m-1}';
layer_entry{m}(2:end) = activation_func(h{m-1});
end

if dbug
layer_entry
fflush(1);
end

h{M} = weights{M} * layer_entry{M}';

%no linear
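Per sample, every hidden layer prepends the bias unit, applies its affine map, and pushes h{m} through the activation; the final affine product h{M} is handled by lines elided from the hunk (the %no linear comment suggests the output stays linear, but that code is not shown). A condensed sketch of the same pass, reusing weights, activation_func, and M from the function above:

    a = [-1; entries(:, index)];                  % input layer plus bias unit
    for m = 1:M-1
      a = [-1; activation_func(weights{m} * a)];  % hidden layers
    end
    h_out = weights{M} * a;                       % final affine map; its use is elided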
@@ -67,7 +95,7 @@
%d{M};

for i = M-1:-1:1
d{i} = (activation_der(h{i})' .* (d{i+1} * weights{i+1}(:,2:end)));
d{i} = (activation_der(h{i})' .* (d{i+1} * weights{i+1}(:, 2:end)));
end

%correct weights
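The recursion is the standard delta rule: layer i's deltas are its activation derivative times the next layer's deltas pulled back through the weights, with column 1 of weights{i+1} skipped because no error propagates into the constant -1 bias input. The weight-correction step under %correct weights is elided from the hunk; with the live rate learning_factor and the momentum coefficient alpha (held in a), the conventional update it presumably performs looks like:

    % hypothetical shape of the elided update -- the standard rule, not
    % necessarily the repo's exact code
    for i = 1:M
      delta_w{i} = learning_factor * d{i}' * layer_entry{i} + a * prev_delta_w{i};
      weights{i} = weights{i} + delta_w{i};
    end
    prev_delta_w = delta_w;   % remembered for the momentum term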
@@ -104,9 +132,12 @@
a = alpha;
end
end

if dbug
[error_per_iteration(iteration),iteration,toc, learning_factor]
if train_percentage ~= 1
test_error(iteration) = sum((test_expected_output - get_output(test_entry, weights, neurons_per_layer, activation_func)).^2)/length(test_entry(1,:));
[error_per_iteration(iteration), iteration, toc, learning_factor, test_error(iteration)]
fflush(1);
else
[error_per_iteration(iteration), iteration, toc, learning_factor]
fflush(1);
end
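test_error(iteration) is the held-out mean squared error: predictions for every test sample come from get_output, and the squared residuals are averaged over the test-set size. An equivalent, slightly more readable spelling:

    pred = get_output(test_entry, weights, neurons_per_layer, activation_func);
    test_error(iteration) = mean((test_expected_output - pred) .^ 2);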

@@ -117,7 +148,7 @@
if(learning_factor < 0.001 * eta)
learning_factor = 0.1 * eta;
end

end

end

end
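This guard keeps the adaptive learning rate from decaying to nothing: eta still holds the configured starting value, and once learning_factor drops below 0.1% of it, the rate snaps back up to 10% of the original. Traced with eta = 0.1 from configuration.m and a hypothetical decayed value:

    eta = 0.1;                          % starting rate from configuration.m
    learning_factor = 9e-5;             % hypothetical decayed value
    if (learning_factor < 0.001 * eta)  % 9e-5 < 1e-4, so the floor triggers
      learning_factor = 0.1 * eta;      % reset to 0.01
    end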
