/*  _______________________________________________________________________

    DAKOTA: Design Analysis Kit for Optimization and Terascale Applications
    Copyright 2014-2020 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
    This software is distributed under the GNU Lesser General Public License.
    For more information, see the README file in the top Dakota directory.
    _______________________________________________________________________ */

//- Class:        EvaluationStore
//- Description:  Class implementation
//- Owner:        J. Adam Stephens
#include <memory>
#include <algorithm>
#include <tuple>
#include <cmath>
#include <climits> // for INT_MAX, used below as an integer dataset fill value
#include "EvaluationStore.hpp"
#ifdef DAKOTA_HAVE_HDF5
#include "HDF5_IO.hpp"
#endif
#include "DakotaVariables.hpp"
#include "DakotaResponse.hpp"
#include "DakotaActiveSet.hpp"
#include "dakota_data_types.hpp"
#include "dakota_results_types.hpp"
#include "MarginalsCorrDistribution.hpp"

namespace Dakota {

const Real REAL_DSET_FILL_VAL = NAN; // not a number, constant defined in <cmath>
const int INT_DSET_FILL_VAL = INT_MAX;
const String STR_DSET_FILL_VAL = "";
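// These sentinel values pad ragged rows (e.g. variable-length set elements)
// in the fixed-size datasets created below; the num_elements fields written
// alongside them record how many entries of each row are real data.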

DefaultSet::DefaultSet(const ActiveSet &in_set) : set(in_set) {
  const ShortArray &asv = set.request_vector();
  numFunctions = asv.size();
  numGradients = std::count_if(asv.begin(),
                               asv.end(),
                               [](const short &a){return a & 2;});
  numHessians = std::count_if(asv.begin(),
                              asv.end(),
                              [](const short &a){return a & 4;});
}
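// Note on the counts above: each ASV entry packs three request bits per
// response function (1 = value, 2 = gradient, 4 = Hessian), so e.g. an entry
// of 3 requests value and gradient and contributes to numGradients but not
// to numHessians.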


const int HDF5_CHUNK_SIZE = 40000;
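// HDF5_CHUNK_SIZE is presumably the chunk extent along the appendable (row)
// dimension of the extensible datasets created throughout this file.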
#ifdef DAKOTA_HAVE_HDF5
void EvaluationStore::set_database(std::shared_ptr<HDF5IOHelper> db_ptr) {
  hdf5Stream = db_ptr;
}
#endif

bool EvaluationStore::active() {
  #ifdef DAKOTA_HAVE_HDF5
  return bool(hdf5Stream);
  #else
  return false;
  #endif
}


std::map<unsigned short, String> EvaluationStore::create_variable_type_map() {
  std::map<unsigned short, String> variable_types;
  variable_types[EMPTY_TYPE] = "EMPTY_TYPE";
  variable_types[CONTINUOUS_DESIGN] = "CONTINUOUS_DESIGN";
  variable_types[DISCRETE_DESIGN_RANGE] = "DISCRETE_DESIGN_RANGE";
  variable_types[DISCRETE_DESIGN_SET_INT] = "DISCRETE_DESIGN_SET_INT";
  variable_types[DISCRETE_DESIGN_SET_STRING] = "DISCRETE_DESIGN_SET_STRING";
  variable_types[DISCRETE_DESIGN_SET_REAL] = "DISCRETE_DESIGN_SET_REAL";
  variable_types[NORMAL_UNCERTAIN] = "NORMAL_UNCERTAIN";
  variable_types[LOGNORMAL_UNCERTAIN] = "LOGNORMAL_UNCERTAIN";
  variable_types[UNIFORM_UNCERTAIN] = "UNIFORM_UNCERTAIN";
  variable_types[LOGUNIFORM_UNCERTAIN] = "LOGUNIFORM_UNCERTAIN";
  variable_types[TRIANGULAR_UNCERTAIN] = "TRIANGULAR_UNCERTAIN";
  variable_types[EXPONENTIAL_UNCERTAIN] = "EXPONENTIAL_UNCERTAIN";
  variable_types[BETA_UNCERTAIN] = "BETA_UNCERTAIN";
  variable_types[GAMMA_UNCERTAIN] = "GAMMA_UNCERTAIN";
  variable_types[GUMBEL_UNCERTAIN] = "GUMBEL_UNCERTAIN";
  variable_types[FRECHET_UNCERTAIN] = "FRECHET_UNCERTAIN";
  variable_types[WEIBULL_UNCERTAIN] = "WEIBULL_UNCERTAIN";
  variable_types[HISTOGRAM_BIN_UNCERTAIN] = "HISTOGRAM_BIN_UNCERTAIN";
  variable_types[POISSON_UNCERTAIN] = "POISSON_UNCERTAIN";
  variable_types[BINOMIAL_UNCERTAIN] = "BINOMIAL_UNCERTAIN";
  variable_types[NEGATIVE_BINOMIAL_UNCERTAIN] = "NEGATIVE_BINOMIAL_UNCERTAIN";
  variable_types[GEOMETRIC_UNCERTAIN] = "GEOMETRIC_UNCERTAIN";
  variable_types[HYPERGEOMETRIC_UNCERTAIN] = "HYPERGEOMETRIC_UNCERTAIN";
  variable_types[HISTOGRAM_POINT_UNCERTAIN_INT] = "HISTOGRAM_POINT_UNCERTAIN_INT";
  variable_types[HISTOGRAM_POINT_UNCERTAIN_STRING] = "HISTOGRAM_POINT_UNCERTAIN_STRING";
  variable_types[HISTOGRAM_POINT_UNCERTAIN_REAL] = "HISTOGRAM_POINT_UNCERTAIN_REAL";
  variable_types[CONTINUOUS_INTERVAL_UNCERTAIN] = "CONTINUOUS_INTERVAL_UNCERTAIN";
  variable_types[DISCRETE_INTERVAL_UNCERTAIN] = "DISCRETE_INTERVAL_UNCERTAIN";
  variable_types[DISCRETE_UNCERTAIN_SET_INT] = "DISCRETE_UNCERTAIN_SET_INT";
  variable_types[DISCRETE_UNCERTAIN_SET_STRING] = "DISCRETE_UNCERTAIN_SET_STRING";
  variable_types[DISCRETE_UNCERTAIN_SET_REAL] = "DISCRETE_UNCERTAIN_SET_REAL";
  variable_types[CONTINUOUS_STATE] = "CONTINUOUS_STATE";
  variable_types[DISCRETE_STATE_RANGE] = "DISCRETE_STATE_RANGE";
  variable_types[DISCRETE_STATE_SET_INT] = "DISCRETE_STATE_SET_INT";
  variable_types[DISCRETE_STATE_SET_STRING] = "DISCRETE_STATE_SET_STRING";
  variable_types[DISCRETE_STATE_SET_REAL] = "DISCRETE_STATE_SET_REAL";
  return variable_types;
}

const std::map<unsigned short, String> EvaluationStore::variableTypes = EvaluationStore::create_variable_type_map();

// Declare a source for the model or iterator.
// Permissible values of owner_type are "iterator",
// "nested", "surrogate", "recast", and "simulation".
// Permissible values of source_type are "iterator", "nested", "surrogate",
// "recast", "simulation", "interface", and "approximation".
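// For example, declaring a simulation model as the source of a method
// produces a soft link
//   /methods/<owner_id>/sources/<source_id> -> /models/simulation/<source_id>
// provided evaluations for that model are slated for storage.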
void EvaluationStore::
declare_source(const String &owner_id, const String &owner_type,
               const String &source_id, const String &source_type) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return;
  // Location of source model or interface evals or method results
  String source_location;
  // Location of the link to the source
  String link_location;
  // TODO: Report/raise some kind of error for invalid owner or source strings

  if(owner_type == "iterator") {
    link_location = String("/methods/") + owner_id + "/sources/" + source_id;
    if(source_type == "iterator") { // always link iterator sources
      source_location = String("/methods/") + source_id;
      hdf5Stream->create_softlink(link_location, source_location);
    } else { // source is a model
      if( (modelSelection == MODEL_EVAL_STORE_TOP_METHOD && owner_id == topLevelMethodId) ||
           modelSelection == MODEL_EVAL_STORE_ALL_METHODS )
        sourceModels.emplace(source_id);
      if(model_active(source_id)) { // Only link if evals for this model will be stored
        source_location = String("/models/") + source_type + "/" + source_id;
        hdf5Stream->create_softlink(link_location, source_location);
      }
    }
  } else { // owner is a model. Assume it should be stored.
    link_location = String("/models/") + owner_type + "/" + owner_id + "/sources/" + source_id;
    if(source_type == "iterator") {
      source_location = String("/methods/") + source_id;
      hdf5Stream->create_softlink(link_location, source_location);
    } else if(source_type == "interface" && interface_active(source_type)) {
      source_location = String("/interfaces/") + source_id + "/" + owner_id;
      hdf5Stream->create_softlink(link_location, source_location);
    }
    else if(model_active(source_id)) { // source is a model
      source_location = String("/models/") + source_type + "/" + source_id;
      hdf5Stream->create_softlink(link_location, source_location);
    }
  }
#else
  return;
#endif
}

EvaluationsDBState EvaluationStore::iterator_allocate(const String &iterator_id,
    const bool &top_level) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return EvaluationsDBState::INACTIVE;
  if(top_level) {
    topLevelMethodId = iterator_id;
    hdf5Stream->add_attribute("/", "top_method", iterator_id);
  }
  return EvaluationsDBState::ACTIVE;
#else
  return EvaluationsDBState::INACTIVE;
#endif
}

/// Allocate storage for model evaluations
EvaluationsDBState EvaluationStore::model_allocate(const String &model_id, const String &model_type,
                    const Variables &variables, const Pecos::MultivariateDistribution &mv_dist,
                    const Response &response, const ActiveSet &set) {
#ifdef DAKOTA_HAVE_HDF5
  if(! (active() && model_active(model_id)))
    return EvaluationsDBState::INACTIVE;
  allocatedModels.emplace(model_id);
  const auto & ds_pair = modelDefaultSets.emplace(model_id, DefaultSet(set));
  const DefaultSet &default_set = (*ds_pair.first).second;
  String root_group = create_model_root(model_id, model_type);
  String scale_root = create_scale_root(root_group);
  // Create evaluation ID dataset, which is attached as a scale to many datasets
  String eval_ids_scale = scale_root + "evaluation_ids";
  hdf5Stream->create_empty_dataset(eval_ids_scale, {0},
      ResultsOutputType::INTEGER, HDF5_CHUNK_SIZE);

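  // The evaluation_ids scale grows by one entry per stored evaluation and is
  // attached to every per-evaluation dataset under this model's group.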
  std::shared_ptr<Pecos::MarginalsCorrDistribution> mvd_rep =
    std::static_pointer_cast<Pecos::MarginalsCorrDistribution>
    (mv_dist.multivar_dist_rep());
  // BMA: Left this a raw get() due to default of NULL
  allocate_variables(root_group, variables, mvd_rep.get());
  allocate_response(root_group, response, default_set);
  allocate_metadata(root_group, variables, response, default_set);
  return EvaluationsDBState::ACTIVE;
#else
  return EvaluationsDBState::INACTIVE;
#endif
}

/// Allocate storage for evaluations of interface+model pairs
EvaluationsDBState EvaluationStore::interface_allocate(const String &model_id, const String &interface_id,
                    const String &interface_type, const Variables &variables, const Response &response,
                    const ActiveSet &set, const String2DArray &an_comp) {
#ifdef DAKOTA_HAVE_HDF5
  if(!(active() && interface_active(interface_type)))
    return EvaluationsDBState::INACTIVE;
  allocatedInterfaces.emplace(make_pair(model_id, interface_id));
  const auto & ds_pair = interfaceDefaultSets.emplace(std::make_pair(model_id, interface_id), DefaultSet(set));
  const DefaultSet &default_set = (*ds_pair.first).second;
  String root_group = create_interface_root(model_id, interface_id);
  String scale_root = create_scale_root(root_group);
  // Create evaluation ID dataset, which is attached as a scale to many datasets
  String eval_ids_scale = scale_root + "evaluation_ids";
  hdf5Stream->create_empty_dataset(eval_ids_scale, {0},
      ResultsOutputType::INTEGER, HDF5_CHUNK_SIZE);

  allocate_variables(root_group, variables);
  allocate_response(root_group, response, default_set);
  allocate_metadata(root_group, variables, response, default_set, an_comp);
  return EvaluationsDBState::ACTIVE;
#else
  return EvaluationsDBState::INACTIVE;
#endif
}

/// Store a model evaluation
void EvaluationStore::store_model_variables(const String &model_id, const String &model_type,
                            const int &eval_id, const ActiveSet &set, const Variables &variables) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return;
  const DefaultSet &default_set_s = modelDefaultSets[model_id];
  if(set.request_vector().size() != default_set_s.numFunctions) {
    if(resizedModels.find(model_id) == resizedModels.end()) {
      resizedModels.insert(model_id);
      Cerr << "Warning: Number of functions provided to HDF5 database by model\n"
        << "\n  '" << model_id << "'\n\nhas changed since the study began. This behavior is not\n"
        << "currently supported. Storage will be skipped.\n";
    }
    modelResponseIndexCache.emplace(std::make_tuple(model_id, eval_id), -1);
    return;
  }
  resizedModels.erase(model_id);
  String root_group = create_model_root(model_id, model_type);
  String scale_root = create_scale_root(root_group);
  // Append this evaluation's ID to the scale shared by the datasets below
  String eval_ids_scale = scale_root + "evaluation_ids";
  hdf5Stream->append_scalar(eval_ids_scale, eval_id);
  store_variables(root_group, variables);
  store_metadata(root_group, set, default_set_s);
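  // Reserve one empty row in each response dataset now and cache the row
  // index; store_model_response() overwrites that row when results arrive
  // and then drops the cache entry.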

  int resp_idx = hdf5Stream->append_empty(root_group + "responses/functions");
  const ShortArray &default_asv = default_set_s.set.request_vector();
  if( default_set_s.numGradients )
    hdf5Stream->append_empty(root_group + "responses/gradients");
  if( default_set_s.numHessians )
    hdf5Stream->append_empty(root_group + "responses/hessians");
  modelResponseIndexCache.emplace(std::make_tuple(model_id, eval_id), resp_idx);
#else
  return;
#endif
}

/// Store a response for a model evaluation
void EvaluationStore::store_model_response(const String &model_id, const String &model_type,
                            const int &eval_id, const Response &response) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return;
  const DefaultSet &default_set_s = modelDefaultSets[model_id];
  std::tuple<String, int> key(model_id, eval_id);
  int response_index = modelResponseIndexCache[key];
  if(response_index == -1)
    return;
  String root_group = create_model_root(model_id, model_type);
  store_response(root_group, response_index, response, default_set_s);
  auto cache_entry = modelResponseIndexCache.find(key);
  modelResponseIndexCache.erase(cache_entry);
#else
  return;
#endif
}

/// Store variables for an interface+model evaluation
void EvaluationStore::store_interface_variables(const String &model_id, const String &interface_id,
                            const int &eval_id, const ActiveSet &set, const Variables &variables) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return;
  String root_group = create_interface_root(model_id, interface_id);
  String scale_root = create_scale_root(root_group);
  const auto set_key = std::make_pair(model_id, interface_id);
  const DefaultSet &default_set_s = interfaceDefaultSets[set_key];
  // Append this evaluation's ID to the scale shared by the datasets below
  String eval_ids_scale = scale_root + "evaluation_ids";
  hdf5Stream->append_scalar(eval_ids_scale, eval_id);
  store_variables(root_group, variables);
  store_metadata(root_group, set, default_set_s);
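  // As with model evaluations, reserve empty response rows and cache the row
  // index, here keyed by (model, interface, eval id), for the matching
  // store_interface_response() call.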

  int resp_idx = hdf5Stream->append_empty(root_group + "responses/functions");
  if( default_set_s.numGradients )
    hdf5Stream->append_empty(root_group + "responses/gradients");
  if( default_set_s.numHessians )
    hdf5Stream->append_empty(root_group + "responses/hessians");
  interfaceResponseIndexCache.emplace(std::make_tuple(model_id, interface_id, eval_id), resp_idx);
#else
  return;
#endif
}

/// Store a response for an interface+model evaluation
void EvaluationStore::store_interface_response(const String &model_id, const String &interface_id,
                            const int &eval_id, const Response &response) {
#ifdef DAKOTA_HAVE_HDF5
  if(!active())
    return;
  std::tuple<String, String, int> key(model_id, interface_id, eval_id);
  int response_index = interfaceResponseIndexCache[key];
  String root_group = create_interface_root(model_id, interface_id);
  store_response(root_group, response_index, response, interfaceDefaultSets[std::make_pair(model_id, interface_id)]);
  auto cache_entry = interfaceResponseIndexCache.find(key);
  interfaceResponseIndexCache.erase(cache_entry);
#else
  return;
#endif
}

String EvaluationStore::create_interface_root(const String &model_id, const String &interface_id) {
  return String("/interfaces/") + interface_id + '/' + model_id + '/';
}
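// Example: model "m1" driven through interface "if1" stores its evaluations
// under /interfaces/if1/m1/.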

String EvaluationStore::create_model_root(const String &model_id, const String &model_type) {
  return String("/models/") + model_type + '/' + model_id + '/';
}

String EvaluationStore::create_scale_root(const String &root_group) {
  return String("/_scales") + root_group;
}
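// Example: for root group /models/simulation/m1/, dimension scales are kept
// under the parallel group /_scales/models/simulation/m1/.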

/// Allocate storage for variables
void EvaluationStore::allocate_variables(const String &root_group, const Variables &variables,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
  // TODO: variable names and order
#ifdef DAKOTA_HAVE_HDF5
  String variables_root_group = root_group + "variables/";
  String scale_root = create_scale_root(root_group);
  String variables_scale_root = scale_root + "variables/";
  String eval_ids = scale_root + "evaluation_ids";

  if(mvd_rep) // will be NULL for interfaces
    allocate_variable_parameters(root_group, variables, mvd_rep);
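  // Each non-empty variable domain below (all-continuous, discrete integer,
  // discrete string, discrete real) gets an extensible (0 x n) dataset with
  // the evaluation_ids scale attached on axis 0 and descriptor, id, and type
  // scales attached on axis 1.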

  if(variables.acv()) {
    String data_name = variables_root_group + "continuous";
    String labels_name = variables_scale_root + "continuous_descriptors";
    String ids_name = variables_scale_root + "continuous_ids";
    String types_name = variables_scale_root + "continuous_types";

    hdf5Stream->create_empty_dataset(data_name, {0, int(variables.acv())},
        ResultsOutputType::REAL, HDF5_CHUNK_SIZE);
    hdf5Stream->store_vector(labels_name,
                             variables.all_continuous_variable_labels());
    hdf5Stream->attach_scale(data_name, eval_ids, "evaluation_ids", 0);
    hdf5Stream->attach_scale(data_name, labels_name, "variables", 1);
    hdf5Stream->store_vector(ids_name, variables.all_continuous_variable_ids());
    hdf5Stream->attach_scale(data_name, ids_name, "ids", 1);

    UShortMultiArrayConstView types = variables.all_continuous_variable_types();
    StringArray type_labels(variables.acv());
    std::transform(types.begin(), types.end(), type_labels.begin(),
        [](const unsigned short t){return variableTypes.at(t);});
    hdf5Stream->store_vector(types_name, type_labels);
    hdf5Stream->attach_scale(data_name, types_name, "types", 1);
  }

  if(variables.adiv()) {
    String data_name = variables_root_group + "discrete_integer";
    String labels_name = variables_scale_root + "discrete_integer_descriptors";
    String ids_name = variables_scale_root + "discrete_integer_ids";
    String types_name = variables_scale_root + "discrete_integer_types";

    hdf5Stream->create_empty_dataset(data_name, {0, int(variables.adiv())},
        ResultsOutputType::INTEGER, HDF5_CHUNK_SIZE);
    hdf5Stream->store_vector(labels_name,
                             variables.all_discrete_int_variable_labels());
    hdf5Stream->attach_scale(data_name, eval_ids, "evaluation_ids", 0);
    hdf5Stream->attach_scale(data_name, labels_name, "variables", 1);
    hdf5Stream->store_vector(ids_name, variables.all_discrete_int_variable_ids());
    hdf5Stream->attach_scale(data_name, ids_name, "ids", 1);

    UShortMultiArrayConstView types = variables.all_discrete_int_variable_types();
    StringArray type_labels(variables.adiv());
    std::transform(types.begin(), types.end(), type_labels.begin(),
        [](const unsigned short t){return variableTypes.at(t);});
    hdf5Stream->store_vector(types_name, type_labels);
    hdf5Stream->attach_scale(data_name, types_name, "types", 1);
  }

  if(variables.adsv()) {
    String data_name = variables_root_group + "discrete_string";
    String labels_name = variables_scale_root + "discrete_string_descriptors";
    String ids_name = variables_scale_root + "discrete_string_ids";
    String types_name = variables_scale_root + "discrete_string_types";

    hdf5Stream->create_empty_dataset(data_name, {0, int(variables.adsv())},
        ResultsOutputType::STRING, HDF5_CHUNK_SIZE);
    hdf5Stream->store_vector(labels_name,
                             variables.all_discrete_string_variable_labels());
    hdf5Stream->attach_scale(data_name, eval_ids, "evaluation_ids", 0);
    hdf5Stream->attach_scale(data_name, labels_name, "variables", 1);
    hdf5Stream->store_vector(ids_name, variables.all_discrete_string_variable_ids());
    hdf5Stream->attach_scale(data_name, ids_name, "ids", 1);

    UShortMultiArrayConstView types = variables.all_discrete_string_variable_types();
    StringArray type_labels(variables.adsv());
    std::transform(types.begin(), types.end(), type_labels.begin(),
        [](const unsigned short t){return variableTypes.at(t);});
    hdf5Stream->store_vector(types_name, type_labels);
    hdf5Stream->attach_scale(data_name, types_name, "types", 1);
  }

  if(variables.adrv()) {
    String data_name = variables_root_group + "discrete_real";
    String labels_name = variables_scale_root + "discrete_real_descriptors";
    String ids_name = variables_scale_root + "discrete_real_ids";
    String types_name = variables_scale_root + "discrete_real_types";

    hdf5Stream->create_empty_dataset(data_name, {0, int(variables.adrv())},
        ResultsOutputType::REAL, HDF5_CHUNK_SIZE);
    hdf5Stream->store_vector(labels_name,
                             variables.all_discrete_real_variable_labels());
    hdf5Stream->attach_scale(data_name, eval_ids, "evaluation_ids", 0);
    hdf5Stream->attach_scale(data_name, labels_name, "variables", 1);
    hdf5Stream->store_vector(ids_name, variables.all_discrete_real_variable_ids());
    hdf5Stream->attach_scale(data_name, ids_name, "ids", 1);

    UShortMultiArrayConstView types = variables.all_discrete_real_variable_types();
    StringArray type_labels(variables.adrv());
    std::transform(types.begin(), types.end(), type_labels.begin(),
        [](const unsigned short t){return variableTypes.at(t);});
    hdf5Stream->store_vector(types_name, type_labels);
    hdf5Stream->attach_scale(data_name, types_name, "types", 1);
  }
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_continuous_design(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::CONTINUOUS_RANGE
  // parameters: Pecos::CR_LWR_BND, Pecos::CR_UPR_BND
  RealArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::CR_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::CR_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_design_range(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_RANGE
  // parameters: Pecos::DR_LWR_BND, Pecos::DR_UPR_BND
  IntArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DR_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DR_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::INTEGER),
    VariableParametersField("upper_bound", ResultsOutputType::INTEGER)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_design_set_int(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_INT
  // parameters: Pecos::DSI_VALUES
  IntSetArray isa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSI_VALUES, isa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : isa) {
    num_elements.push_back(e.size());
    max_num_elements = (max_num_elements > e.size()) ? max_num_elements : e.size();
  }
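  // E.g., sets {1,2} and {5,6,7} flatten to {1, 2, INT_DSET_FILL_VAL, 5, 6, 7}
  // with max_num_elements = 3; num_elements ({2, 3}) records the true lengths.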
  // Populate a 1D array with ALL the elements, including padding
  IntArray all_elements(num_rv * max_num_elements, INT_DSET_FILL_VAL);
  for(int i = 0; i < num_rv; ++i)
    std::copy(isa[i].begin(), isa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::INTEGER, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_design_set_string(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_STRING
  // parameters: Pecos::DSS_VALUES
  StringSetArray ssa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSS_VALUES, ssa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : ssa) {
    num_elements.push_back(e.size());
    max_num_elements = (max_num_elements > e.size()) ? max_num_elements : e.size();
  }
  // Populate a 1D array with ALL the elements, including padding
  StringArray all_elements(num_rv * max_num_elements, STR_DSET_FILL_VAL);
  for(int i = 0; i < num_rv; ++i)
    std::copy(ssa[i].begin(), ssa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::STRING, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_design_set_real(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_REAL
  // parameters: Pecos::DSR_VALUES
  RealSetArray rsa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSR_VALUES, rsa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : rsa) {
    num_elements.push_back(e.size());
    max_num_elements = (max_num_elements > e.size()) ? max_num_elements : e.size();
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray all_elements(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  for(int i = 0; i < num_rv; ++i)
    std::copy(rsa[i].begin(), rsa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_normal_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv types: Pecos::NORMAL, Pecos::BOUNDED_NORMAL
  // parameters: Pecos::N_MEAN, Pecos::N_STD_DEV, Pecos::N_LWR_BND, Pecos::N_UPR_BND
  // Use count-based API for lookup since there are two possible Pecos var types
  RealArray means, std_devs, lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::N_MEAN, means);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::N_STD_DEV, std_devs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::N_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::N_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("mean", ResultsOutputType::REAL),
    VariableParametersField("std_deviation", ResultsOutputType::REAL),
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, means, "mean");
  hdf5Stream->set_vector_scalar_field(location, std_devs, "std_deviation");
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_uniform_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::UNIFORM
  // parameters: Pecos::U_LWR_BND, Pecos::U_UPR_BND
  RealArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::U_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::U_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_lognormal_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv types: Pecos::LOGNORMAL, BOUNDED_LOGNORMAL
  // parameters: (LN_MEAN with LN_STD_DEV or LN_ERR_FACT) OR
  //             (LN_LAMBDA with LN_ZETA)
  //             LN_LWR_BND, LN_UPR_BND
  RealArray lbs, ubs, means, std_devs, err_facts, lambdas, zetas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_UPR_BND, ubs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_MEAN, means);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_STD_DEV, std_devs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_ERR_FACT, err_facts);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_LAMBDA, lambdas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LN_ZETA, zetas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL),
    VariableParametersField("mean", ResultsOutputType::REAL),
    VariableParametersField("std_deviation", ResultsOutputType::REAL),
    VariableParametersField("error_factor", ResultsOutputType::REAL),
    VariableParametersField("lambda", ResultsOutputType::REAL),
    VariableParametersField("zeta", ResultsOutputType::REAL)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
  hdf5Stream->set_vector_scalar_field(location, means, "mean");
  hdf5Stream->set_vector_scalar_field(location, std_devs, "std_deviation");
  hdf5Stream->set_vector_scalar_field(location, err_facts, "error_factor");
  hdf5Stream->set_vector_scalar_field(location, lambdas, "lambda");
  hdf5Stream->set_vector_scalar_field(location, zetas, "zeta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_loguniform_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::LOGUNIFORM
  // parameters: LU_LWR_BND, LU_UPR_BND
  RealArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LU_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::LU_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_triangular_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::TRIANGULAR
  // parameters: T_LWR_BND, T_UPR_BND, T_MODE
  RealArray lbs, ubs, modes;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::T_MODE, modes);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::T_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::T_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("mode", ResultsOutputType::REAL),
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, modes, "mode");
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_exponential_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::EXPONENTIAL
  // parameters: E_BETA
  RealArray betas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::E_BETA, betas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("beta", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_beta_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::BETA
  // parameters: BE_ALPHA, BE_BETA, BE_LWR_BND, BE_UPR_BND
  RealArray alphas, betas, lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BE_ALPHA, alphas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BE_BETA, betas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BE_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BE_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("alpha", ResultsOutputType::REAL),
    VariableParametersField("beta", ResultsOutputType::REAL),
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, alphas, "alpha");
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_gamma_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::GAMMA
  // parameters: GA_ALPHA, GA_BETA
  RealArray alphas, betas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::GA_ALPHA, alphas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::GA_BETA, betas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("alpha", ResultsOutputType::REAL),
    VariableParametersField("beta", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, alphas, "alpha");
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_gumbel_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::GUMBEL
  // parameters: GU_ALPHA, GU_BETA
  RealArray alphas, betas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::GU_ALPHA, alphas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::GU_BETA, betas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("alpha", ResultsOutputType::REAL),
    VariableParametersField("beta", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, alphas, "alpha");
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_frechet_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::FRECHET
  // parameters: F_ALPHA, F_BETA
  RealArray alphas, betas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::F_ALPHA, alphas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::F_BETA, betas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("alpha", ResultsOutputType::REAL),
    VariableParametersField("beta", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, alphas, "alpha");
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_weibull_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::WEIBULL
  // parameters: W_ALPHA, W_BETA
  RealArray alphas, betas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::W_ALPHA, alphas);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::W_BETA, betas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("alpha", ResultsOutputType::REAL),
    VariableParametersField("beta", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, alphas, "alpha");
  hdf5Stream->set_vector_scalar_field(location, betas, "beta");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_histogram_bin_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::HISTOGRAM_BIN
  // parameters: Pecos::H_BIN_PAIRS
  RealRealMapArray bin_pairs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::H_BIN_PAIRS, bin_pairs);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : bin_pairs) {
    num_elements.push_back(p.size());
    max_num_elements = (max_num_elements > p.size()) ? max_num_elements : p.size();
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray counts(num_rv*max_num_elements, REAL_DSET_FILL_VAL),
            abscissas(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  for(int i = 0; i < num_rv; ++i) {
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &abscissas[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.first;});
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &counts[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.second;});
  }
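  // Each map entry contributes its key to abscissas and its mapped value to
  // counts; rows are padded out to max_num_elements as in the discrete set
  // storage above.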

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("abscissas", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("counts", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, abscissas, max_num_elements, "abscissas");
  hdf5Stream->set_vector_vector_field(location, counts, max_num_elements, "counts");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_poisson_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::POISSON
  // parameters: P_LAMBDA
  RealArray lambdas;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::P_LAMBDA, lambdas);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lambda", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lambdas, "lambda");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_binomial_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::BINOMIAL
  // parameters: BI_P_PER_TRIAL, BI_TRIALS
  RealArray p_per_trial;
  UIntArray trials;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BI_P_PER_TRIAL, p_per_trial);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::BI_TRIALS, trials);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("probability_per_trial", ResultsOutputType::REAL),
    VariableParametersField("num_trials", ResultsOutputType::UINTEGER),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, p_per_trial, "probability_per_trial");
  hdf5Stream->set_vector_scalar_field(location, trials, "num_trials");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_negative_binomial_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::NEGATIVE_BINOMIAL
  // parameters: NBI_P_PER_TRIAL, NBI_TRIALS
  RealArray p_per_trial;
  UIntArray trials;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::NBI_P_PER_TRIAL, p_per_trial);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::NBI_TRIALS, trials);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("probability_per_trial", ResultsOutputType::REAL),
    VariableParametersField("num_trials", ResultsOutputType::UINTEGER),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, p_per_trial, "probability_per_trial");
  hdf5Stream->set_vector_scalar_field(location, trials, "num_trials");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_geometric_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::GEOMETRIC
  // parameters: GE_P_PER_TRIAL
  RealArray p_per_trial;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::GE_P_PER_TRIAL, p_per_trial);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("probability_per_trial", ResultsOutputType::REAL),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, p_per_trial, "probability_per_trial");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_hypergeometric_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::HYPERGEOMETRIC
  // parameters: HGE_TOT_POP, HGE_SEL_POP, HGE_DRAWN
  UIntArray tot_pop, sel_pop, num_drawn;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::HGE_TOT_POP, tot_pop);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::HGE_SEL_POP, sel_pop);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::HGE_DRAWN, num_drawn);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("total_population", ResultsOutputType::UINTEGER),
    VariableParametersField("selected_population", ResultsOutputType::UINTEGER),
    VariableParametersField("num_drawn", ResultsOutputType::UINTEGER),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, tot_pop, "total_population");
  hdf5Stream->set_vector_scalar_field(location, sel_pop, "selected_population");
  hdf5Stream->set_vector_scalar_field(location, num_drawn, "num_drawn");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_histogram_point_uncertain_int(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::HISTOGRAM_PT_INT
  // parameters: Pecos::H_PT_INT_PAIRS
  IntRealMapArray bin_pairs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::H_PT_INT_PAIRS, bin_pairs);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : bin_pairs) {
    num_elements.push_back(p.size());
    max_num_elements = (max_num_elements > p.size()) ? max_num_elements : p.size();
  }
  // Populate a 1D array with ALL the elements, including padding
  IntArray abscissas(num_rv*max_num_elements, INT_DSET_FILL_VAL);
  RealArray counts(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  for(int i = 0; i < num_rv; ++i) {
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &abscissas[i*max_num_elements],
        [](const std::pair<int,Real> &p){return p.first;});
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &counts[i*max_num_elements],
        [](const std::pair<int,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("abscissas", ResultsOutputType::INTEGER, {max_num_elements}),
    VariableParametersField("counts", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, abscissas, max_num_elements, "abscissas");
  hdf5Stream->set_vector_vector_field(location, counts, max_num_elements, "counts");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_histogram_point_uncertain_string(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::HISTOGRAM_PT_STRING
  // parameters: Pecos::H_PT_STR_PAIRS
  StringRealMapArray bin_pairs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::H_PT_STR_PAIRS, bin_pairs);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : bin_pairs) {
    num_elements.push_back(p.size());
    max_num_elements = std::max(max_num_elements, p.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  StringArray abscissas(num_rv*max_num_elements, STR_DSET_FILL_VAL);
  RealArray counts(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &abscissas[i*max_num_elements],
        [](const std::pair<String,Real> &p){return p.first;});
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &counts[i*max_num_elements],
        [](const std::pair<String,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("abscissas", ResultsOutputType::STRING, {max_num_elements}),
    VariableParametersField("counts", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, abscissas, max_num_elements, "abscissas");
  hdf5Stream->set_vector_vector_field(location, counts, max_num_elements, "counts");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_histogram_point_uncertain_real(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::HISTOGRAM_PT_REAL
  // parameters: Pecos::H_PT_REAL_PAIRS
  RealRealMapArray bin_pairs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::H_PT_REAL_PAIRS, bin_pairs);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : bin_pairs) {
    num_elements.push_back(p.size());
    max_num_elements = std::max(max_num_elements, p.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray abscissas(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  RealArray counts(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &abscissas[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.first;});
    std::transform(bin_pairs[i].begin(), bin_pairs[i].end(),
        &counts[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("abscissas", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("counts", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, abscissas, max_num_elements, "abscissas");
  hdf5Stream->set_vector_vector_field(location, counts, max_num_elements, "counts");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_continuous_interval_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::CONTINUOUS_INTERVAL_UNCERTAIN
  // parameters: Pecos::CIU_BPA
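  // (BPA = basic probability assignment: for each variable, a map from
  // (lower, upper) interval bounds to the probability assigned to that
  // interval.)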
  RealRealPairRealMapArray intervals;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::CIU_BPA, intervals);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : intervals) {
    num_elements.push_back(p.size());
    max_num_elements = std::max(max_num_elements, p.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray probs(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  RealArray lbs(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  RealArray ubs(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(intervals[i].begin(), intervals[i].end(),
        &lbs[i*max_num_elements],
        [](const std::pair<std::pair<Real,Real>, Real> &p){return p.first.first;});
    std::transform(intervals[i].begin(), intervals[i].end(),
        &ubs[i*max_num_elements],
        [](const std::pair<std::pair<Real,Real>, Real> &p){return p.first.second;});
    std::transform(intervals[i].begin(), intervals[i].end(),
        &probs[i*max_num_elements],
        [](const std::pair<std::pair<Real,Real>, Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("interval_probabilities", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("lower_bounds", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("upper_bounds", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, probs, max_num_elements, "interval_probabilities");
  hdf5Stream->set_vector_vector_field(location, lbs, max_num_elements, "lower_bounds");
  hdf5Stream->set_vector_vector_field(location, ubs, max_num_elements, "upper_bounds");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_interval_uncertain(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_INTERVAL_UNCERTAIN
  // parameters: Pecos::DIU_BPA
  IntIntPairRealMapArray intervals;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DIU_BPA, intervals);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  SizetArray num_elements;
  for(const auto &p : intervals) {
    num_elements.push_back(p.size());
    max_num_elements = std::max(max_num_elements, p.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray probs(num_rv*max_num_elements, REAL_DSET_FILL_VAL);
  IntArray lbs(num_rv*max_num_elements, INT_DSET_FILL_VAL);
  IntArray ubs(num_rv*max_num_elements, INT_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(intervals[i].begin(), intervals[i].end(),
        &lbs[i*max_num_elements],
        [](const std::pair<std::pair<int,int>, Real> &p){return p.first.first;});
    std::transform(intervals[i].begin(), intervals[i].end(),
        &ubs[i*max_num_elements],
        [](const std::pair<std::pair<int,int>, Real> &p){return p.first.second;});
    std::transform(intervals[i].begin(), intervals[i].end(),
        &probs[i*max_num_elements],
        [](const std::pair<std::pair<int,int>, Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("interval_probabilities", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("lower_bounds", ResultsOutputType::INTEGER, {max_num_elements}),
    VariableParametersField("upper_bounds", ResultsOutputType::INTEGER, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, probs, max_num_elements, "interval_probabilities");
  hdf5Stream->set_vector_vector_field(location, lbs, max_num_elements, "lower_bounds");
  hdf5Stream->set_vector_vector_field(location, ubs, max_num_elements, "upper_bounds");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_uncertain_set_int(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_UNCERTAIN_SET_INT
  // parameters: Pecos::DUSI_VALUES_PROBS
  IntRealMapArray sets;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DUSI_VALUES_PROBS, sets);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : sets) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  IntArray elements(num_rv * max_num_elements, INT_DSET_FILL_VAL);
  RealArray probs(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(sets[i].begin(), sets[i].end(),
        &elements[i*max_num_elements],
        [](const std::pair<int,Real> &p){return p.first;});
    std::transform(sets[i].begin(), sets[i].end(),
        &probs[i*max_num_elements],
        [](const std::pair<int,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::INTEGER, {max_num_elements}),
    VariableParametersField("set_probabilities", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, elements, max_num_elements, "elements");
  hdf5Stream->set_vector_vector_field(location, probs, max_num_elements, "set_probabilities");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_uncertain_set_string(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_UNCERTAIN_SET_STRING
  // parameters: Pecos::DUSS_VALUES_PROBS
  StringRealMapArray sets;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DUSS_VALUES_PROBS, sets);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : sets) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  StringArray elements(num_rv * max_num_elements, STR_DSET_FILL_VAL);
  RealArray probs(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(sets[i].begin(), sets[i].end(),
        &elements[i*max_num_elements],
        [](const std::pair<String,Real> &p){return p.first;});
    std::transform(sets[i].begin(), sets[i].end(),
        &probs[i*max_num_elements],
        [](const std::pair<String,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::STRING, {max_num_elements}),
    VariableParametersField("set_probabilities", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, elements, max_num_elements, "elements");
  hdf5Stream->set_vector_vector_field(location, probs, max_num_elements, "set_probabilities");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_uncertain_set_real(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_UNCERTAIN_SET_REAL
  // parameters: Pecos::DUSR_VALUES_PROBS
  RealRealMapArray sets;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DUSR_VALUES_PROBS, sets);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : sets) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray elements(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  RealArray probs(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i) {
    std::transform(sets[i].begin(), sets[i].end(),
        &elements[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.first;});
    std::transform(sets[i].begin(), sets[i].end(),
        &probs[i*max_num_elements],
        [](const std::pair<Real,Real> &p){return p.second;});
  }

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::REAL, {max_num_elements}),
    VariableParametersField("set_probabilities", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, elements, max_num_elements, "elements");
  hdf5Stream->set_vector_vector_field(location, probs, max_num_elements, "set_probabilities");
#else
  return;
#endif
}


void EvaluationStore::
store_parameters_for_continuous_state(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::CONTINUOUS_RANGE
  // parameters: Pecos::CR_LWR_BND, Pecos::CR_UPR_BND
  RealArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::CR_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::CR_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::REAL),
    VariableParametersField("upper_bound", ResultsOutputType::REAL)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_state_range(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_RANGE
  // parameters: Pecos::DR_LWR_BND, Pecos::DR_UPR_BND
  IntArray lbs, ubs;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DR_LWR_BND, lbs);
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DR_UPR_BND, ubs);
  std::vector<VariableParametersField> fields = {
    VariableParametersField("lower_bound", ResultsOutputType::INTEGER),
    VariableParametersField("upper_bound", ResultsOutputType::INTEGER)
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, lbs, "lower_bound");
  hdf5Stream->set_vector_scalar_field(location, ubs, "upper_bound");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_state_set_int(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_INT
  // parameters: Pecos::DSI_VALUES
  IntSetArray isa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSI_VALUES, isa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : isa) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  IntArray all_elements(num_rv * max_num_elements, INT_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i)
    std::copy(isa[i].begin(), isa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::INTEGER, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_state_set_string(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_STRING
  // parameters: Pecos::DSS_VALUES
  StringSetArray ssa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSS_VALUES, ssa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : ssa) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  StringArray all_elements(num_rv * max_num_elements, STR_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i)
    std::copy(ssa[i].begin(), ssa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::STRING, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

void EvaluationStore::
store_parameters_for_discrete_state_set_real(const size_t start_rv,
    const size_t num_rv,
    const String &location,
    Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  // pecos rv type: Pecos::DISCRETE_SET_REAL
  // parameters: Pecos::DSR_VALUES
  RealSetArray rsa;
  mvd_rep->pull_parameters(start_rv, num_rv, Pecos::DSR_VALUES, rsa);
  // Because h5py barfs on vlen datasets of vlen strings, we have to
  // use regular, fixed-sized datasets that are big enough to hold the
  // maximum number of elements.
  size_t max_num_elements = 0;
  IntArray num_elements;
  for(const auto &e : rsa) {
    num_elements.push_back(e.size());
    max_num_elements = std::max(max_num_elements, e.size());
  }
  // Populate a 1D array with ALL the elements, including padding
  RealArray all_elements(num_rv * max_num_elements, REAL_DSET_FILL_VAL);
  for(size_t i = 0; i < num_rv; ++i)
    std::copy(rsa[i].begin(), rsa[i].end(), &all_elements[i*max_num_elements]);

  std::vector<VariableParametersField> fields = {
    VariableParametersField("num_elements", ResultsOutputType::INTEGER),
    VariableParametersField("elements", ResultsOutputType::REAL, {max_num_elements}),
  };
  IntArray dims = {int(num_rv)};
  hdf5Stream->create_empty_dataset(location, dims, fields);
  hdf5Stream->set_vector_scalar_field(location, num_elements, "num_elements");
  hdf5Stream->set_vector_vector_field(location, all_elements, max_num_elements, "elements");
#else
  return;
#endif
}

/// Store parameters for a single "domain" (e.g. all continuous variables)
void EvaluationStore::store_parameters_for_domain(const String &root_group,
    const UShortMultiArrayConstView &types, const SizetMultiArrayConstView &ids,
    const StringMultiArrayView &labels, Pecos::MarginalsCorrDistribution *mvd_rep) {
#ifdef DAKOTA_HAVE_HDF5
  String scale_root = create_scale_root(root_group); // root_group already has
                                                     // variable_parameters
  // The loop below chunks up the set of variables by Dakota type (e.g. normal_uncertain)
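  // (This assumes same-typed variables are contiguous in 'types'; e.g.
  // types = [CONTINUOUS_DESIGN, CONTINUOUS_DESIGN, NORMAL_UNCERTAIN] yields
  // two chunks, indexes [0,1] and [2,2], each producing one dataset.)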
  auto first_it = types.begin(); // iterator to first variable of this type
  size_t first_idx, last_idx; // indexes of the first and last variable of this type
  while(first_it != types.end()) { // iterate until all variables have been processed
    // Find iterator to last variable of this type
    UShortArray to_find = {*first_it};
    auto last_it = std::find_end(first_it, types.end(), to_find.begin(), to_find.end());
    first_idx = std::distance(types.begin(), first_it);
    last_idx = std::distance(first_it, last_it) + first_idx;
    const unsigned short &this_type = *first_it;
    // parameters are obtained from the mvd_rep object by starting index and count
    size_t start_rv = ids[first_idx] - 1;
    size_t num_rv = last_idx - first_idx + 1;
    bool store_scales = true; // it's safe to store scales; will be set to
                              // false if no datasets are created, which can happen
                              // if there's an unhandled type of variable
    String location = root_group, scale_location = scale_root;
#define CALL_STORE_PARAMETERS_FOR(vtype)                               \
    location += #vtype;                                                \
    scale_location += #vtype;                                          \
    store_parameters_for_##vtype(start_rv, num_rv, location, mvd_rep); \
    break;
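    // For example, CALL_STORE_PARAMETERS_FOR(continuous_design) expands to:
    //   location += "continuous_design";
    //   scale_location += "continuous_design";
    //   store_parameters_for_continuous_design(start_rv, num_rv, location, mvd_rep);
    //   break;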

    switch(this_type) {
      case CONTINUOUS_DESIGN:
        CALL_STORE_PARAMETERS_FOR(continuous_design);
      case DISCRETE_DESIGN_RANGE:
        CALL_STORE_PARAMETERS_FOR(discrete_design_range);
      case DISCRETE_DESIGN_SET_INT:
        CALL_STORE_PARAMETERS_FOR(discrete_design_set_int);
      case DISCRETE_DESIGN_SET_STRING:
        CALL_STORE_PARAMETERS_FOR(discrete_design_set_string);
      case DISCRETE_DESIGN_SET_REAL:
        CALL_STORE_PARAMETERS_FOR(discrete_design_set_real);
      case NORMAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(normal_uncertain);
      case UNIFORM_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(uniform_uncertain);
      case LOGNORMAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(lognormal_uncertain);
      case LOGUNIFORM_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(loguniform_uncertain);
      case TRIANGULAR_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(triangular_uncertain);
      case EXPONENTIAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(exponential_uncertain);
      case BETA_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(beta_uncertain);
      case GAMMA_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(gamma_uncertain);
      case GUMBEL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(gumbel_uncertain);
      case FRECHET_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(frechet_uncertain);
      case WEIBULL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(weibull_uncertain);
      case HISTOGRAM_BIN_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(histogram_bin_uncertain);
      case POISSON_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(poisson_uncertain);
      case BINOMIAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(binomial_uncertain);
      case NEGATIVE_BINOMIAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(negative_binomial_uncertain);
      case GEOMETRIC_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(geometric_uncertain);
      case HYPERGEOMETRIC_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(hypergeometric_uncertain);
      case HISTOGRAM_POINT_UNCERTAIN_INT:
        CALL_STORE_PARAMETERS_FOR(histogram_point_uncertain_int);
      case HISTOGRAM_POINT_UNCERTAIN_STRING:
        CALL_STORE_PARAMETERS_FOR(histogram_point_uncertain_string);
      case HISTOGRAM_POINT_UNCERTAIN_REAL:
        CALL_STORE_PARAMETERS_FOR(histogram_point_uncertain_real);
      case CONTINUOUS_INTERVAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(continuous_interval_uncertain);
      case DISCRETE_INTERVAL_UNCERTAIN:
        CALL_STORE_PARAMETERS_FOR(discrete_interval_uncertain);
      case DISCRETE_UNCERTAIN_SET_INT:
        CALL_STORE_PARAMETERS_FOR(discrete_uncertain_set_int);
      case DISCRETE_UNCERTAIN_SET_STRING:
        CALL_STORE_PARAMETERS_FOR(discrete_uncertain_set_string);
      case DISCRETE_UNCERTAIN_SET_REAL:
        CALL_STORE_PARAMETERS_FOR(discrete_uncertain_set_real);
      case CONTINUOUS_STATE:
        CALL_STORE_PARAMETERS_FOR(continuous_state);
      case DISCRETE_STATE_RANGE:
        CALL_STORE_PARAMETERS_FOR(discrete_state_range);
      case DISCRETE_STATE_SET_INT:
        CALL_STORE_PARAMETERS_FOR(discrete_state_set_int);
      case DISCRETE_STATE_SET_STRING:
        CALL_STORE_PARAMETERS_FOR(discrete_state_set_string);
      case DISCRETE_STATE_SET_REAL:
        CALL_STORE_PARAMETERS_FOR(discrete_state_set_real);
      default:
        store_scales = false; // if no cases were executed, then there's no
                              // dataset to add scales to.
    }
    if(store_scales) {
      StringMultiArrayConstView these_labels(
              labels[boost::indices[idx_range(first_idx, last_idx+1)]]);
      SizetMultiArrayConstView these_ids(
              ids[boost::indices[idx_range(first_idx, last_idx+1)]]);
      // Create descriptors dimension scale
      String labels_location = scale_location + "/labels";
      hdf5Stream->store_vector(labels_location, these_labels);
      hdf5Stream->attach_scale(location, labels_location, "labels", 0);
      // Create ids dimension scale
      String ids_location = scale_location + "/ids";
      hdf5Stream->store_vector(ids_location, these_ids);
      hdf5Stream->attach_scale(location, ids_location, "ids", 0);
    }
    // Increment to the next type
    first_it = ++last_it;
  }
#else
  return;
#endif
}


/// Allocate storage for variable parameters
void EvaluationStore::allocate_variable_parameters(const String &root_group,
    const Variables &variables, Pecos::MarginalsCorrDistribution *mvd_rep) {

  String parameters_group = root_group + "metadata/variable_parameters/";
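  // Each variable type within each domain gets its own dataset at
  // <root_group>/metadata/variable_parameters/<dakota_type> (see
  // CALL_STORE_PARAMETERS_FOR above).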
  if(variables.acv()) {
    store_parameters_for_domain(parameters_group,
        variables.all_continuous_variable_types(),
        variables.all_continuous_variable_ids(),
        variables.all_continuous_variable_labels(),
        mvd_rep);
  }
  if(variables.adiv()) {
    store_parameters_for_domain(parameters_group,
        variables.all_discrete_int_variable_types(),
        variables.all_discrete_int_variable_ids(),
        variables.all_discrete_int_variable_labels(),
        mvd_rep);
  }
  if(variables.adsv()) {
    store_parameters_for_domain(parameters_group,
        variables.all_discrete_string_variable_types(),
        variables.all_discrete_string_variable_ids(),
        variables.all_discrete_string_variable_labels(),
        mvd_rep);
  }
  if(variables.adrv()) {
    store_parameters_for_domain(parameters_group,
        variables.all_discrete_real_variable_types(),
        variables.all_discrete_real_variable_ids(),
        variables.all_discrete_real_variable_labels(),
        mvd_rep);
  }
}


/// Allocate storage for responses
void EvaluationStore::allocate_response(const String &root_group, const Response &response,
    const DefaultSet &set_s) {
#ifdef DAKOTA_HAVE_HDF5
  String response_root_group = root_group + "responses/";
  String scale_root = create_scale_root(root_group);
  String response_scale_root = scale_root + "responses/";
  String eval_ids = scale_root + "evaluation_ids";
  int num_functions = int(set_s.numFunctions);
  // Store function labels
  String function_labels_name = response_scale_root + "function_descriptors";
  hdf5Stream->store_vector(function_labels_name, response.function_labels());
  // Create functions dataset
  String functions_name = response_root_group + "functions";
  hdf5Stream->create_empty_dataset(functions_name, {0, num_functions},
      ResultsOutputType::REAL, HDF5_CHUNK_SIZE, &REAL_DSET_FILL_VAL);
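  // (The 0 extent marks the evaluation dimension, which grows as evaluations
  // are stored; the chunk size enables the extendable layout HDF5 requires.)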
  hdf5Stream->attach_scale(functions_name, eval_ids, "evaluation_ids", 0);
  hdf5Stream->attach_scale(functions_name, function_labels_name, "responses", 1);
  // Create gradients dataset, if needed
  const ShortArray &asv = set_s.set.request_vector();
  int num_gradients = set_s.numGradients;
  int num_hessians = set_s.numHessians;
  if(num_gradients) {
    int dvv_length = set_s.set.derivative_vector().size();
    String gradients_name = response_root_group + "gradients";
    hdf5Stream->create_empty_dataset(gradients_name, {0, num_gradients, dvv_length},
      ResultsOutputType::REAL, HDF5_CHUNK_SIZE, &REAL_DSET_FILL_VAL);
    hdf5Stream->attach_scale(gradients_name, eval_ids, "evaluation_ids", 0);
    if(num_gradients == num_functions)
      hdf5Stream->attach_scale(gradients_name, function_labels_name, "responses", 1);
    else { // mixed gradients
      StringArray gradient_labels;
      const StringArray &function_labels = response.function_labels();
      for(int i = 0; i < num_functions; ++i)
        if(asv[i] & 2)
          gradient_labels.push_back(function_labels[i]);
      String gradient_labels_name = response_scale_root + "gradient_descriptors";
      hdf5Stream->store_vector(gradient_labels_name, gradient_labels);
      hdf5Stream->attach_scale(gradients_name, gradient_labels_name, "responses", 1);
    }
  }
  if(num_hessians) {
    int dvv_length = set_s.set.derivative_vector().size();
    String hessians_name = response_root_group + "hessians";
    hdf5Stream->create_empty_dataset(hessians_name, {0, num_hessians, dvv_length, dvv_length},
      ResultsOutputType::REAL, HDF5_CHUNK_SIZE, &REAL_DSET_FILL_VAL);
    hdf5Stream->attach_scale(hessians_name, eval_ids, "evaluation_ids", 0);
    if(num_hessians == num_functions)
      hdf5Stream->attach_scale(hessians_name, function_labels_name, "responses", 1);
    else { // mixed hessians
      StringArray hessian_labels;
      const StringArray &function_labels = response.function_labels();
      for(int i = 0; i < num_functions; ++i)
        if(asv[i] & 4)
          hessian_labels.push_back(function_labels[i]);
      String hessian_labels_name = response_scale_root + "hessian_descriptors";
      hdf5Stream->store_vector(hessian_labels_name, hessian_labels);
      hdf5Stream->attach_scale(hessians_name, hessian_labels_name, "responses", 1);
    }
  }
#else
  return;
#endif
}

/// Allocate storage for metadata
void EvaluationStore::allocate_metadata(const String &root_group, const Variables &variables,
    const Response &response, const DefaultSet &set_s, const String2DArray &an_comps) {
#ifdef DAKOTA_HAVE_HDF5
  String scale_root = create_scale_root(root_group);
  String metadata_root = root_group + "metadata/";
  String metadata_scale_root = scale_root + "metadata/";
  String eval_ids = scale_root + "evaluation_ids";
  const ShortArray &asv = set_s.set.request_vector();
  const SizetArray &dvv = set_s.set.derivative_vector();
  const int &num_functions = set_s.numFunctions;
  int num_deriv_vars = dvv.size();
  // ASV
  String asv_name = metadata_root + "active_set_vector";
  hdf5Stream->create_empty_dataset(asv_name, {0, num_functions}, ResultsOutputType::INTEGER, HDF5_CHUNK_SIZE);
  hdf5Stream->attach_scale(asv_name, eval_ids, "evaluation_ids", 0);
  hdf5Stream->attach_scale(asv_name, scale_root+"responses/function_descriptors", "responses", 1);
  hdf5Stream->store_vector(metadata_scale_root + "default_asv", asv);
  hdf5Stream->attach_scale(asv_name, metadata_scale_root + "default_asv", "default_active_set_vector", 1);
  // DVV
  // Only create a DVV dataset when gradients or hessians are available.
  if(set_s.numGradients || set_s.numHessians) {
    String dvv_name = metadata_root + "derivative_variables_vector";
    hdf5Stream->create_empty_dataset(dvv_name, {0, num_deriv_vars}, ResultsOutputType::INTEGER, HDF5_CHUNK_SIZE);
    hdf5Stream->attach_scale(dvv_name, eval_ids, "evaluation_ids", 0);
    // The ids are 1-based, not 0-based
    StringMultiArrayConstView cont_labels = variables.all_continuous_variable_labels();
    hdf5Stream->store_vector(metadata_scale_root + "dv_descriptors", cont_labels);
    hdf5Stream->attach_scale(dvv_name, metadata_scale_root + "dv_descriptors", "variables", 1);
    hdf5Stream->store_vector(metadata_scale_root + "dvv", dvv);
    hdf5Stream->attach_scale(dvv_name, metadata_scale_root + "dvv", "variable_ids", 1);
  }
  // Analysis Components
  // TODO: these perhaps should be stored as a 2D dataset, with shape
  // (analysis drivers x components per driver)
  if(an_comps.size()) {
    StringArray all_comps;
    for(const auto &v : an_comps)
      all_comps.insert(all_comps.end(), v.begin(), v.end());
    hdf5Stream->store_vector(metadata_root + "analysis_components", all_comps);
  }
#else
  return;
#endif
}


void EvaluationStore::store_variables(const String &root_group, const Variables &variables) {
#ifdef DAKOTA_HAVE_HDF5
  String variables_root = root_group + "variables/";
  if(variables.acv())
    hdf5Stream->append_vector(variables_root+"continuous",
        variables.all_continuous_variables());
  if(variables.adiv())
    hdf5Stream->append_vector(variables_root+"discrete_integer",
        variables.all_discrete_int_variables());
  if(variables.adsv())
    hdf5Stream->append_vector(variables_root+"discrete_string",
        variables.all_discrete_string_variables());
  if(variables.adrv())
    hdf5Stream->append_vector(variables_root+"discrete_real",
        variables.all_discrete_real_variables());
#else
  return;
#endif
}

void EvaluationStore::store_response(const String &root_group, const int &resp_idx,
    const Response &response, const DefaultSet &default_set_s) {
#ifdef DAKOTA_HAVE_HDF5
  String response_root = root_group + "responses/";
  const ActiveSet &set = response.active_set();
  const ShortArray &asv = set.request_vector();
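  // ASV bits: 1 = function value, 2 = gradient, 4 = Hessian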
  const SizetArray &dvv = set.derivative_vector();
  const size_t num_functions = asv.size();
  const ShortArray &default_asv = default_set_s.set.request_vector();
  const size_t num_default_deriv_vars = default_set_s.set.derivative_vector().size();
  const SizetArray &default_dvv = default_set_s.set.derivative_vector();
  // function values
  bool has_functions = bool(default_set_s.numFunctions);
  String functions_name = response_root + "functions";
  if(has_functions) {
    // Because of the NaN fill value, we have to do some legwork. If all of the function
    // values are set, we can write them all without making a copy. If some of them are
    // set, we have to make a copy (initialized to NaN) and write just the values that
    // are present. If none are set, we do nothing, because the dataset by default has
    // NaN fill values.
    const RealVector &f = response.function_values();
    int num1 = std::count_if(asv.begin(), asv.end(), [](const short &a){return a & 1;});
    if(num1 == num_functions) {
      hdf5Stream->set_vector(functions_name, f, resp_idx);
    } else if(num1 > 0) {
      RealVector f_copy(num_functions, false /* don't zero out */);
      f_copy = REAL_DSET_FILL_VAL;
      for(int i = 0; i < num_functions; ++i) {
        if(asv[i] & 1) f_copy[i] = f[i];
      }
      hdf5Stream->set_vector(functions_name, f_copy, resp_idx);
    } // else, none are set; do nothing.
  }
  // Gradients. Gradients and hessians are more complicated than function values for two reasons:
  // 1) The dataset was allocated to accommodate the maximum dvv length, and the current
  //    dvv length may be shorter.
  // 2) The dataset was sized to hold gradients only for responses for which they are
  //    available (i.e. mixed gradients), while Dakota (seems to) allocate space for every
  //    response.
  const int &num_gradients = default_set_s.numGradients;
  String gradients_name = response_root + "gradients";
  IntVector dvv_idx; // indexes into the full gradient matrix of the deriv vars. Declare at this scope
                     // so it can be reused for Hessian storage, if needed
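  // (e.g. default_dvv = {1, 2, 5} and dvv = {2, 5} give dvv_idx = {1, 2})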
  if(num_gradients && std::any_of(asv.begin(), asv.end(), [](const short &a){return a & 2;})) {
    // First do the simple case where the dvv is the same length as the default dvv and
    // gradients are not mixed.
    if(dvv.size() == num_default_deriv_vars && num_gradients == num_functions) {
      hdf5Stream->set_matrix(gradients_name, response.function_gradients(), resp_idx, true /*transpose*/);
    } else {
      // Need to grab the gradients only for the subset of responses that can have them, and then
      // for those gradients, grab the components that are in the dvv
      IntArray gradient_idxs; // Indexes of responses that can have gradients
      for(int i = 0; i < num_functions; ++i)
        if(default_asv[i] & 2)
          gradient_idxs.push_back(i);
      const int num_default_gradients = gradient_idxs.size();
      RealMatrix full_gradients(num_default_deriv_vars, num_default_gradients, false /*don't zero out*/);
      full_gradients = REAL_DSET_FILL_VAL;
      dvv_idx.resize(dvv.size());
      for(int i = 0; i < dvv.size(); ++i)
        dvv_idx[i] = find_index(default_dvv, dvv[i]);
      for(int i = 0; i < num_default_gradients; ++i) {
        const RealVector col = response.function_gradient_view(gradient_idxs[i]);
        for(int j = 0; j < dvv.size(); ++j) {
          full_gradients(dvv_idx[j], i) = col(j);
        }
      }
      hdf5Stream->set_matrix(gradients_name, full_gradients, resp_idx, true /*transpose*/);
    }
  }
  // Hessians. The same bookkeeping needs to be done here as for gradients. Additionally, the
  // hessians have to be converted from symmetric matrices to regular ones. (Probably should
  // consider just storing them as row- or column-major 1D arrays.)
  const int &num_hessians = default_set_s.numHessians;
  String hessians_name = response_root + "hessians";
  if(num_hessians && std::any_of(asv.begin(), asv.end(), [](const short &a){return a & 4;})) {
    // First do the simple case where the dvv is the same length as the default dvv, and
    // hessians are not mixed.
    if(dvv.size() == num_default_deriv_vars && num_hessians == num_functions) {
      RealMatrixArray full_hessians;
      for(const auto &m : response.function_hessians()) {
        RealMatrix full_hessian(num_default_deriv_vars, num_default_deriv_vars, false /*don't zero out*/);
        for(int i = 0; i < num_default_deriv_vars; ++i) {
          full_hessian(i, i) = m(i, i);
          for(int j = i+1; j < num_default_deriv_vars; ++j) {
            full_hessian(j,i) = full_hessian(i,j) = m(i,j);
          }
        }
        full_hessians.push_back(full_hessian);
      }
      hdf5Stream->set_vector_matrix(hessians_name, full_hessians, resp_idx, true /*transpose (for efficiency)*/);
    } else {
      IntArray hessian_idxs; // Indexes of responses that can have hessians
      for(int i = 0; i < num_functions; ++i)
        if(default_asv[i] & 4)
          hessian_idxs.push_back(i);
      int num_default_hessians = hessian_idxs.size();
      RealMatrixArray full_hessians;
      if(dvv_idx.empty()) { // not yet populated by the gradient storage block
        dvv_idx.resize(dvv.size());
        for(int i = 0; i < dvv.size(); ++i)
          dvv_idx[i] = find_index(default_dvv, dvv[i]);
      }
      for(int mi = 0; mi < num_default_hessians; ++mi) {
        RealMatrix full_hessian(num_default_deriv_vars, num_default_deriv_vars, false /*don't zero out*/);
        full_hessian = REAL_DSET_FILL_VAL;
        const RealSymMatrix &resp_hessian = response.function_hessian_view(hessian_idxs[mi]);
        for(int i = 0; i < dvv.size(); ++i) {
          const int &dvv_i = dvv_idx[i];
          full_hessian(dvv_i, dvv_i) = resp_hessian(i,i);
          for(int j = i+1; j < dvv.size(); ++j) {
            const int &dvv_j = dvv_idx[j];
            full_hessian(dvv_j, dvv_i) = full_hessian(dvv_i, dvv_j) = resp_hessian(i, j);
          }
        }
        full_hessians.push_back(full_hessian);
      }
      hdf5Stream->set_vector_matrix(hessians_name, full_hessians, resp_idx, true /*transpose*/);
    }
  }
#else
  return;
#endif
}

void EvaluationStore::store_metadata(const String &root_group, const ActiveSet &set,
        const DefaultSet &default_set_s) {
#ifdef DAKOTA_HAVE_HDF5
  String metadata_root = root_group + "metadata/";
  // ASV
  hdf5Stream->append_vector(metadata_root + "active_set_vector", set.request_vector());
  // DVV. The dvv in set may be shorter than the default one, and so it has to be aligned
  // by ID.
  const SizetArray &default_dvv = default_set_s.set.derivative_vector();
  const ShortArray &default_asv = default_set_s.set.request_vector();
  // The DVV dataset doesn't exist unless gradients or hessians can be provided
  if(default_set_s.numGradients || default_set_s.numHessians) {
    const SizetArray &dvv = set.derivative_vector();
    // vector that will be appended to the dataset; "bits" defaulted to 0 ("off")
    IntArray dvv_row(default_dvv.size(), 0);
    // Most of the time, all possible derivative variables will be "active" (the lengths of the
    // current and default DVV will match), so we don't need to examine the DVV entry by entry.
    if(dvv.size() == default_dvv.size())
      std::fill(dvv_row.begin(), dvv_row.end(), 1);
    else {
      // This logic assumes that the entries in dvv and default_dvv are sorted in ascending order.
      // It iterates over the entries of the current dvv, and for each, advances through the default
      // dvv until the entry is found. It then sets the bit for that entry and goes to the next one
      // in the current dvv.
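      // (e.g. default_dvv = {1, 2, 3, 4} and dvv = {2, 4} yield
      // dvv_row = {0, 1, 0, 1})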
      int di = 0;
      for(int si = 0; si < dvv.size(); ++si) {
        for(; di < default_dvv.size(); ++di) {
          if(dvv[si] == default_dvv[di]) {
            dvv_row[di] = 1;
            ++di;
            break;
          }
        }
      }
    }
    hdf5Stream->append_vector(metadata_root + "derivative_variables_vector", dvv_row);
  }
#else
  return;
#endif
}

void EvaluationStore::model_selection(const unsigned short &selection) {
  modelSelection = selection;
}

void EvaluationStore::interface_selection(const unsigned short &selection) {
  interfaceSelection = selection;
}

bool EvaluationStore::model_active(const String &model_id) {
  if(modelSelection == MODEL_EVAL_STORE_ALL)
    return true;
  else if(modelSelection == MODEL_EVAL_STORE_NONE)
    return false;
  else // MODEL_EVAL_STORE_TOP_METHOD and ALL_METHODS
    return sourceModels.find(model_id) != sourceModels.end();
}

bool EvaluationStore::interface_active(const String &iface_type) {
  if(interfaceSelection == INTERF_EVAL_STORE_ALL)
    return true;
  else if(interfaceSelection == INTERF_EVAL_STORE_NONE)
    return false;
  else if(iface_type == "approximation") // simulation only
    return false;
  else
    return true;
}

} // Dakota namespace