## Copyright (C) 2007 Michel D. Schmid <michaelschmid@users.sourceforge.net>
##
##
## This program is free software; you can redistribute it and/or modify it
## under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; see the file COPYING.  If not, see
## <http://www.gnu.org/licenses/>.

## author: msd


## This file is used to test all the m-files inside
## the example1 directory.

## There exists, for each m-file, a corresponding dat-file with
## the numerical results of MATLAB.

## Currently, the following m-files will be tested:
## A. One hidden layer
##    ==================
##    1. mlp9_1_1_tansig
##    2. mlp9_2_1_tansig
##    3. mlp9_2_2_tansig
##    4. mlp9_2_3_tansig
##    5. mlp9_5_3_tansig
##
## B. Two hidden layers
##    ==================

###### mlp9_2_1_tansig ######
## Train a 9-2-1 MLP (tansig hidden layer, purelin output) with fixed
## initial weights, then compare the simulation output and the training
## log against reference results produced by MATLAB.
%!shared cAr, mTestResults, simOut, line, fid
%! diary log_test1_2
%! dir = "example1";
%! ## reference results (training log lines + expected sim output)
%! [cAr, mTestResults] = loadtestresults([dir "/mlp9_2_1_tansig.dat"]);
%! preparedata9_x_1
%! [mTrainInputN,cMeanInput,cStdInput] = prestd(mTrainInput);# standardize inputs
%! mMinMaxElements = min_max(mTrainInputN); # input matrix with (R x 2)...
%! nHiddenNeurons = 2;
%! nOutputNeurons = 1;
%! MLPnet = newff(mMinMaxElements,[nHiddenNeurons nOutputNeurons],{"tansig","purelin"},"trainlm","learngdm","mse");
%! ## fixed initial weights/biases so the run is deterministic and
%! ## comparable to the MATLAB reference
%! MLPnet.IW{1,1}(1,:) = 0.5;
%! MLPnet.IW{1,1}(2,:) = 1.5;
%! MLPnet.LW{2,1}(:) = 0.5;
%! MLPnet.b{1,1}(1,:) = 0.5;
%! MLPnet.b{1,1}(2,:) = 1.5;
%! MLPnet.b{2,1}(:) = 0.5;
%! 
%! VV.P = mValiInput;
%! VV.T = mValiOutput; # BUGFIX: was "mValliOutput" (undefined variable, typo)
%! VV.P = trastd(VV.P,cMeanInput,cStdInput);
%! [net] = train(MLPnet,mTrainInputN,mTrainOutput,[],[],VV);
%! [mTestInputN] = trastd(mTestInput,cMeanInput,cStdInput);
%! [simOut] = sim(net,mTestInputN);
%! diary off
%!assert(simOut,mTestResults,0.0001)
%! ## compare selected columns of the captured training log against the
%! ## reference log lines stored in cAr
%! fid = fopen("log_test1_2","r");
%! line = fgetl(fid);
%!assert(substr(line,16,1),substr(cAr{1,1},16,1))
%!assert(substr(line,27,7),substr(cAr{1,1},27,7))
%!assert(substr(line,48,7),substr(cAr{1,1},48,7))
%! line = fgetl(fid);
%!assert(substr(line,16,2),substr(cAr{1,2},16,2))
%!assert(substr(line,27,7),substr(cAr{1,2},27,7))
%!assert(substr(line,48,6),substr(cAr{1,2},48,6))
%!assert(strcmp("TRAINLM, Validation stop.",substr(cAr{1,3},1,25)))
%! fclose(fid);