% STK_PREDICT_LEAVEONEOUT [overload STK function]
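%
% CALL: LOO_PRED = stk_predict_leaveoneout (M_POST)
%
%    computes the leave-one-out (LOO) predictions for the observations of the
%    posterior model M_POST (see stk_model_gpposterior), using the virtual
%    cross-validation formula.  LOO_PRED is a dataframe with columns 'mean'
%    (LOO predictive mean) and 'var' (LOO predictive variance).
%
% CALL: [LOO_PRED, LOO_RES] = stk_predict_leaveoneout (M_POST)
%
%    also returns a dataframe LOO_RES of raw and normalized LOO residuals,
%    with columns 'residuals' and 'norm_res'.
%
% CALL: stk_predict_leaveoneout (M_POST)
%
%    called without output argument, produces two diagnostic plots: the LOO
%    predictions against the observations, and a histogram of the normalized
%    LOO residuals.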

% Copyright Notice
%
%    Copyright (C) 2017, 2018 CentraleSupelec
%    Copyright (C) 2017 LNE
%
%    Authors:  Remi Stroh   <remi.stroh@lne.fr>
%              Julien Bect  <julien.bect@centralesupelec.fr>

% Copying Permission Statement
%
%    This file is part of
%
%            STK: a Small (Matlab/Octave) Toolbox for Kriging
%               (http://sourceforge.net/projects/kriging)
%
%    STK is free software: you can redistribute it and/or modify it under
%    the terms of the GNU General Public License as published by the Free
%    Software Foundation,  either version 3  of the License, or  (at your
%    option) any later version.
%
%    STK is distributed  in the hope that it will  be useful, but WITHOUT
%    ANY WARRANTY;  without even the implied  warranty of MERCHANTABILITY
%    or FITNESS  FOR A  PARTICULAR PURPOSE.  See  the GNU  General Public
%    License for more details.
%
%    You should  have received a copy  of the GNU  General Public License
%    along with STK.  If not, see <http://www.gnu.org/licenses/>.

function [LOO_pred, LOO_res] = stk_predict_leaveoneout (M_post)

prior_model = M_post.prior_model;

% Compute the covariance matrix and the trend matrix
% (this covariance matrix K takes the noise into account)
[K, P] = stk_make_matcov (prior_model, M_post.input_data);
simple_kriging = (size (P, 2) == 0);
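% (the trend matrix P has one column per basis function of the linear model;
% an empty P thus corresponds to simple kriging, i.e., a model without trend)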

% If simple kriging, just compute the inverse covariance matrix
if simple_kriging
    R = inv (K);
else
    % Use a more complex formula ("virtual cross-validation")
    P_K = P' / K;
    R = K \ (eye (size (K)) - P * ((P_K * P) \ P_K));
    % I = inv (K);
    % R = I - I * P * (inv (P' * I * P)) * P' * I;
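    % (the two commented lines above are a mathematically equivalent form of R,
    % written with explicit inverses; the expression actually used relies on
    % linear solves instead)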
end
dR = diag (R);  % The diagonal of the LOO matrix

% Mean
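% Virtual cross-validation identity: the LOO residual at x_i (observation
% minus LOO prediction) is (R * zi)_i / R_ii, so the LOO prediction is obtained
% by subtracting this raw residual from the observation.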
zi = M_post.output_data;
raw_res = R * zi ./ dR;  % Compute "raw" residuals
zp_mean = zi - raw_res;  % LOO prediction

% Variance
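% The LOO prediction variance at x_i, including the observation noise, is
% 1 / R_ii; subtracting the noise variance gives the variance of the LOO
% prediction of the latent function (clipped at zero to guard against
% round-off).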
noisevariance = stk_get_observation_variances (M_post);
zp_var = max (0, 1 ./ dR - noisevariance);

LOO_pred = stk_dataframe (horzcat (zp_mean, zp_var), {'mean', 'var'});

% Compute residuals?
if nargout ~= 1

    % Compute normalized residuals
    % norm_res = (zi - zp_mean) ./ (sqrt (noisevariance + zp_var));
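    % (the formula below is equivalent to the commented one above, since
    % noisevariance + zp_var equals 1 ./ dR, up to the clipping at zero)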
    norm_res = (sqrt (dR)) .* raw_res;

    % Pack results into a dataframe
    LOO_res = stk_dataframe (horzcat (raw_res, norm_res), ...
        {'residuals', 'norm_res'});
end

% Create LOO cross-validation plots?
if nargout == 0
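    % (LOO_res is available here: nargout == 0 also satisfies the
    % nargout ~= 1 test above, so the residuals have already been computed)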

    % Plot predictions vs observations (left panel)...
    stk_subplot (1, 2, 1);  stk_plot_predvsobs (M_post.output_data, LOO_pred);

    % ...and normalized residuals (right panel)
    stk_subplot (1, 2, 2);  stk_plot_histnormres (LOO_res.norm_res);

end

end % function


%!test  % Check virtual Leave-One-Out formula
%!
%! n = 20;  d = 1;
%! x_obs = stk_sampling_regulargrid (n, d, [0; 2*pi]);
%! z_obs = stk_feval (@sin, x_obs);
%!
%! lm_list = {stk_lm_null, stk_lm_constant, stk_lm_affine};
%!
%! for j = 0:2
%!     for k = 1:(length (lm_list))
%!
%!         model = stk_model ('stk_materncov32_iso', d);
%!         model.lm = lm_list{k};
%!         model.param = log ([1; 5]);
%!
%!         switch j  % test various scenarios for lognoisevariance
%!             case 0
%!                 model.lognoisevariance = -inf;
%!             case 1
%!                 model.lognoisevariance = 0;
%!             case 2
%!                 model.lognoisevariance = (1 + rand (n, 1)) * 1e-3;
%!         end
%!
%!         M_post = stk_model_gpposterior (model, x_obs, z_obs);
%!
%!         [loo_pred, loo_res] = stk_predict_leaveoneout (M_post);
%!         [direct_pred, direct_res] = stk_predict_leaveoneout_direct (M_post);
%!
%!         assert (stk_isequal_tolrel (loo_pred, direct_pred));
%!         assert (stk_isequal_tolrel (loo_res, direct_res));
%!
%!     end
%! end

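%!demo
%! % Illustrative usage sketch, reusing the data and model settings of the
%! % test above (sine observations, Matern 3/2 covariance, unit noise
%! % variance): calling stk_predict_leaveoneout without output arguments
%! % produces the LOO diagnostic plots.
%! n = 20;  d = 1;
%! x_obs = stk_sampling_regulargrid (n, d, [0; 2*pi]);
%! z_obs = stk_feval (@sin, x_obs);
%! model = stk_model ('stk_materncov32_iso', d);
%! model.param = log ([1; 5]);
%! model.lognoisevariance = 0;
%! M_post = stk_model_gpposterior (model, x_obs, z_obs);
%! stk_predict_leaveoneout (M_post);  % no output argument: produce the plots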