1 /*  _______________________________________________________________________
2 
3     DAKOTA: Design Analysis Kit for Optimization and Terascale Applications
4     Copyright 2014-2020 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
5     This software is distributed under the GNU Lesser General Public License.
6     For more information, see the README file in the top Dakota directory.
7     _______________________________________________________________________ */
8 
9 /*
10 ===============================================================================
11     PROJECT:
12 
13         Genetic Algorithm for Sandia National Laboratories
14 
15     CONTENTS:
16 
17         Implementation of class JEGAOptimizer.
18 
19     NOTES:
20 
21         See notes of JEGAOptimizer.hpp.
22 
23     PROGRAMMERS:
24 
25         John Eddy (jpeddy@sandia.gov) (JE)
26         Brian Adams (briadam@sandia.gov) (BA)
27 
28     ORGANIZATION:
29 
30         Sandia National Laboratories
31 
32     COPYRIGHT:
33 
34         This library is free software; you can redistribute it and/or
35         modify it under the terms of the GNU Lesser General Public
36         License as published by the Free Software Foundation; either
37         version 2.1 of the License, or (at your option) any later version.
38 
39         This library is distributed in the hope that it will be useful,
40         but WITHOUT ANY WARRANTY; without even the implied warranty of
41         MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
42         GNU Lesser General Public License for more details.
43 
44         For a copy of the GNU Lesser General Public License, write to:
45             Free Software Foundation, Inc.
46             59 Temple Place, Suite 330
47             Boston, MA 02111-1307 USA
48 
49     VERSION:
50 
51         2.0.0
52 
53     CHANGES:
54 
55         Mon Jun 09 09:48:34 2003 - Original Version (JE)
56         Wed Dec 07 15:00:00 2005 - Added ParameterDatabase subclass to wrap
57                                    ProblemDescDB for dependency removal (JE)
58         Wed Mar 29 12:00:00 2006 - Adopted use of JEGA front end project to
59                                    reduce code duplication and keep Dakota
60                                    up to speed with the latest in JEGA.
61 
62 ===============================================================================
63 */
64 
65 
66 
67 
68 /*
69 ===============================================================================
70 Document This File
71 ===============================================================================
72 */
73 /** \file
74  * \brief Contains the implementation of the JEGAOptimizer class.
75  */
76 
77 
78 
79 
80 /*
81 ===============================================================================
82 Includes
83 ===============================================================================
84 */
85 // JEGAConfig.hpp should be the first include in all JEGA files.
86 #include <../Utilities/include/JEGAConfig.hpp>
87 #include <../Utilities/include/Logging.hpp>
88 
89 // Standard includes.
90 
91 // JEGA core includes.
92 #include <GeneticAlgorithm.hpp>
93 #include <GeneticAlgorithmEvaluator.hpp>
94 #include <GeneticAlgorithmInitializer.hpp>
95 #include <OperatorGroups/AllOperators.hpp>
96 
97 // SOGA-specific API
98 #include <../SOGA/include/SOGA.hpp>
99 
100 // JEGA front end includes.
101 #include <../FrontEnd/Core/include/Driver.hpp>
102 #include <../FrontEnd/Core/include/ProblemConfig.hpp>
103 #include <../FrontEnd/Core/include/AlgorithmConfig.hpp>
104 #include <../FrontEnd/Core/include/EvaluatorCreator.hpp>
105 
106 // Dakota includes.
107 #include <JEGAOptimizer.hpp>
108 #include <ProblemDescDB.hpp>
109 #include <MarginalsCorrDistribution.hpp>
110 
111 // Eddy utility includes.
112 #include <utilities/include/EDDY_DebugScope.hpp>
113 
114 // JEGA utility includes.
115 #include <../Utilities/include/DesignGroup.hpp>
116 #include <../Utilities/include/ConstraintInfo.hpp>
117 #include <../Utilities/include/ParameterExtractor.hpp>
118 #include <../Utilities/include/BasicParameterDatabaseImpl.hpp>
119 #include <../Utilities/include/MultiObjectiveStatistician.hpp>
120 #include <../Utilities/include/SingleObjectiveStatistician.hpp>
121 
122 #include <algorithm>
123 #include <sstream>
124 
125 /*
126 ===============================================================================
127 Namespace Using Directives
128 ===============================================================================
129 */
130 using namespace std;
131 using namespace JEGA::Logging;
132 using namespace JEGA::FrontEnd;
133 using namespace eddy::utilities;
134 using namespace JEGA::Utilities;
135 using namespace JEGA::Algorithms;
136 
137 /*
138 ===============================================================================
139 Begin Namespace
140 ===============================================================================
141 */
142 namespace Dakota {
143 
144 
145 
146 
147 /*
148 ===============================================================================
149 File Scope Helper Functions
150 ===============================================================================
151 */
152 /// Creates a string from the argument \a val using an ostringstream.
153 /**
154  * This only gets used in this file and is only ever called with ints so no
155  * error checking is in place.
156  *
157  * \param val The value of type T to convert to a string.
158  * \return The string representation of \a val created using an ostringstream.
159  */
160 template <typename T>
161 string
asstring(
163     const T& val
164     )
165 {
166     EDDY_FUNC_DEBUGSCOPE
167     ostringstream ostr;
168     ostr << val;
169     return ostr.str();
170 }
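
// For example, asstring(42) yields the string "42".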
171 
172 
173 /// An evaluator specialization that knows how to interact with Dakota.
174 /**
175  * This evaluator knows how to use the model to do evaluations both in
176  * synchronous and asynchronous modes.
177  */
178 class JEGAOptimizer::Evaluator :
179     public GeneticAlgorithmEvaluator
180 {
181     /*
182     ===========================================================================
183     Member Data Declarations
184     ===========================================================================
185     */
186     private:
187 
188         /// The Model known by this evaluator.
189         /**
190          * It is through this model that evaluations will take place.
191          */
192         Model& _model;
193 
194     /*
195     ===========================================================================
196     Public Methods
197     ===========================================================================
198     */
199     public:
200 
201          /// Returns the proper name of this operator.
202         /**
203          * \return The string "DAKOTA JEGA Evaluator".
204          */
205         static
206         const std::string&
        Name(
208             )
209         {
210             EDDY_FUNC_DEBUGSCOPE
211             static const string ret("DAKOTA JEGA Evaluator");
212             return ret;
213         }
214 
215         /// Returns a full description of what this operator does and how.
216         /**
217          * The returned text is:
218          * \verbatim
219             This evaluator uses Sandia's DAKOTA optimization
220             software to evaluate the passed in Designs.  This
221             makes it possible to take advantage of the fact that
222             DAKOTA is designed to run on massively parallel machines.
223            \endverbatim.
224          *
225          * \return A description of the operation of this operator.
226          */
227         static
228         const std::string&
        Description(
230             )
231         {
232             EDDY_FUNC_DEBUGSCOPE
233             static const string ret(
234                 "This evaluator uses Sandia's DAKOTA optimization software to "
235                 "evaluate the passed in Designs.  This makes it possible to "
236                 "take advantage of the fact that DAKOTA is designed to run on "
237                 "massively parallel machines."
238                 );
239             return ret;
240         }
241 
242     /*
243     ===========================================================================
244     Subclass Visible Methods
245     ===========================================================================
246     */
247     protected:
248 
249         /**
         * \brief This method fills \a intoCont, \a intoDiscInt,
         * \a intoDiscReal, and \a intoDiscString appropriately using the
         * values of \a from.
         *
         * The discrete integer design variable values are placed in
         * \a intoDiscInt, the discrete real design variable values are placed
         * in \a intoDiscReal, the discrete string values are placed in
         * \a intoDiscString, and the continuous values are placed into
         * \a intoCont.
256          * The values are written into the vectors from the beginning so any
257          * previous contents of the vectors will be overwritten.
258          *
259          * \param from The Design class object from which to extract the
260          *             discrete design variable values.
261          * \param intoDiscInt The vector into which to place the extracted
262          *             discrete integer values.
263          * \param intoDiscReal The vector into which to place the extracted
264          *             discrete real values.
         * \param intoCont The vector into which to place the extracted
         *             continuous values.
         * \param intoDiscString The array into which to place the extracted
         *             discrete string values.
         */
268         void
269         SeparateVariables(
270             const Design& from,
271             RealVector& intoCont,
272             IntVector&  intoDiscInt,
273             RealVector& intoDiscReal,
            StringMultiArray& intoDiscString
275             ) const;
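
        // Illustrative call (hypothetical sizes ncv, ndiv, ndrv), showing the
        // variable ordering assumed throughout this file: continuous first,
        // then discrete integer, discrete real, and finally discrete string.
        //
        //     RealVector x;  IntVector di;  RealVector dr;  StringMultiArray ds;
        //     SeparateVariables(des, x, di, dr, ds);
        //     // x  <- des[0 .. ncv-1]
        //     // di <- des[ncv .. ncv+ndiv-1]
        //     // dr <- des[ncv+ndiv .. ncv+ndiv+ndrv-1]
        //     // ds <- unmapped values of the remaining entries
        //
        // See LoadTheDesignVariables and LoadDakotaResponses for where this
        // ordering is established.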
276 
277         /**
278          * \brief Records the computed objective and constraint function values
279          *        into \a into.
280          *
281          * This method takes the response values stored in \a from and properly
282          * transfers them into the \a into design.
283          *
284          * The response vector \a from is expected to contain values for each
285          * objective function followed by values for each non-linear constraint
286          * in the order in which the info objects were loaded into the target
287          * by the optimizer class.
288          *
289          * \param from The vector of responses to install into \a into.
290          * \param into The Design to which the responses belong and into which
291          *             they must be written.
292          */
293         void
294         RecordResponses(
295             const RealVector& from,
296             Design& into
297             ) const;
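
        // Sketch of the layout assumed for the response vector passed to
        // RecordResponses (ordering per LoadTheObjectiveFunctions and
        // LoadTheConstraints); nobj and nnlcon are illustrative names for the
        // number of objectives and nonlinear constraints:
        //
        //     from = [ f_0, ..., f_{nobj-1},        // objective values
        //              g_0, ..., g_{nnlcon-1} ]     // nonlinear constraints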
298 
299         /// Returns the number of non-linear constraints for the problem.
300         /**
301          * This is computed by adding the number of non-linear equality
302          * constraints to the number of non-linear inequality constraints.
303          * These values are obtained from the model.
304          *
305          * \return The total number of non-linear constraints.
306          */
307         std::size_t
        GetNumberNonLinearConstraints(
309             ) const
310         {
311             EDDY_FUNC_DEBUGSCOPE
312             return this->_model.num_nonlinear_eq_constraints() +
313                    this->_model.num_nonlinear_ineq_constraints();
314         }
315 
316         /// Returns the number of linear constraints for the problem.
317         /**
318          * This is computed by adding the number of linear equality
319          * constraints to the number of linear inequality constraints.
320          * These values are obtained from the model.
321          *
322          * \return The total number of linear constraints.
323          */
324         std::size_t
        GetNumberLinearConstraints(
326             ) const
327         {
328             EDDY_FUNC_DEBUGSCOPE
329             return this->_model.num_linear_eq_constraints() +
330                    this->_model.num_linear_ineq_constraints();
331         }
332 
333     /*
334     ===========================================================================
335     Subclass Overridable Methods
336     ===========================================================================
337     */
338     public:
339 
340         /// Does evaluation of each design in \a group.
341         /**
342          * This method uses the Model known by this class to get Designs
343          * evaluated.  It properly formats the Design class information in a
344          * way that Dakota will understand and then interprets the Dakota
345          * results and puts them back into the Design class object.  It
346          * respects the asynchronous flag in the Model so evaluations may
347          * occur synchronously or asynchronously.
348          *
349          * Prior to evaluating a Design, this class checks to see if it
350          * is marked as already evaluated.  If it is, then the evaluation
351          * of that Design is not carried out.  This is not strictly
352          * necessary because Dakota keeps track of evaluated designs and
353          * does not re-evaluate.  An exception is the case of a population
354          * read in from a file complete with responses where Dakota is
355          * unaware of the evaluations.
356          *
357          * \param group The group of Design class objects to be evaluated.
358          * \return true if all evaluations completed and false otherwise.
359          */
360         virtual
361         bool
362         Evaluate(
363             DesignGroup& group
364             );
365 
366         /// This method cannot be used!!
367         /**
368          * This method does nothing and cannot be called.  This is because in
369          * the case of asynchronous evaluation, this method would be unable
370          * to conform.  It would require that each evaluation be done in a
371          * synchronous fashion.
372          *
373          * \param des A Design that would be evaluated if this
374          *            method worked.
         * \return Would return true if the Design were evaluated and false
         *         otherwise.  In practice this method never evaluates
         *         anything; it issues a fatal error and returns false.
378          */
379         virtual
380         bool
        Evaluate(
382             Design& des
383             )
384         {
385             EDDY_FUNC_DEBUGSCOPE
386 
387             JEGALOG_II_F(this->GetLogger(), this,
388                 text_entry(lfatal(),
389                     this->GetName() +
390                     ": You cannot use Evaluate(Design&) with this "
391                     "evaluator...ever.")
392                 )
393             return false;
394         }
395 
396         /// Returns the proper name of this operator.
397         /**
398          * \return See Name().
399          */
400         virtual
401         std::string
        GetName(
403             ) const
404         {
405             EDDY_FUNC_DEBUGSCOPE
406             return Evaluator::Name();
407         }
408 
409         /// Returns a full description of what this operator does and how.
410         /**
411          * \return See Description().
412          */
413         virtual
414         std::string
        GetDescription(
416             ) const
417         {
418             EDDY_FUNC_DEBUGSCOPE
419             return Evaluator::Description();
420         }
421 
422         /**
423          * \brief Creates and returns a pointer to an exact duplicate of this
424          *        operator.
425          *
426          * \param algorithm The GA for which the clone is being created.
427          * \return A clone of this operator.
428          */
429         virtual
430         GeneticAlgorithmOperator*
        Clone(
432             GeneticAlgorithm& algorithm
433             ) const
434         {
435             EDDY_FUNC_DEBUGSCOPE
436             return new Evaluator(*this, algorithm, _model);
437         }
438 
439 
440     /*
441     ===========================================================================
442     Structors
443     ===========================================================================
444     */
445     public:
446 
447         /**
         * \brief Constructs an Evaluator for use by \a algorithm.
449          *
450          * The optimizer is needed for purposes of variable scaling.
451          *
452          * \param algorithm The GA for which the new evaluator is to be used.
453          * \param model The model through which evaluations will be done.
454          */
        Evaluator(
456             GeneticAlgorithm& algorithm,
457             Model& model
458             ) :
459                 GeneticAlgorithmEvaluator(algorithm),
460                 _model(model)
461         {
462             EDDY_FUNC_DEBUGSCOPE
463         }
464 
465         /**
         * \brief Copy constructs an Evaluator.
467          *
468          * \param copy The evaluator from which properties are to be duplicated
469          *             into this.
470          */
        Evaluator(
472             const Evaluator& copy
473             ) :
474                 GeneticAlgorithmEvaluator(copy),
475                 _model(copy._model)
476         {
477             EDDY_FUNC_DEBUGSCOPE
478         }
479 
480         /**
         * \brief Copy constructs an Evaluator for use by \a algorithm.
482          *
483          * The optimizer is needed for purposes of variable scaling.
484          *
485          * \param copy The existing Evaluator from which to retrieve
486          *             properties.
487          * \param algorithm The GA for which the new evaluator is to be used.
488          * \param model The model through which evaluations will be done.
489          */
        Evaluator(
491             const Evaluator& copy,
492             GeneticAlgorithm& algorithm,
493             Model& model
494             ) :
495                 GeneticAlgorithmEvaluator(copy, algorithm),
496                 _model(model)
497         {
498             EDDY_FUNC_DEBUGSCOPE
499         }
500 
501     private:
502 
503         /// This constructor has no implementation and cannot be used.
504         /**
505          * This constructor can never be used.  It is provided so that this
506          * operator can still be registered in an operator registry even though
507          * it can never be instantiated from there.
508          *
509          * \param algorithm The GA for which the new evaluator is to be used.
510          */
511         Evaluator(
512             GeneticAlgorithm& algorithm
513             );
514 
515 
516 }; // class JEGAOptimizer::Evaluator
517 
518 /**
519  * \brief A specialization of the JEGA::FrontEnd::EvaluatorCreator that
520  *        creates a new instance of a Evaluator.
521  */
522 class JEGAOptimizer::EvaluatorCreator :
523     public JEGA::FrontEnd::EvaluatorCreator
524 {
525     /*
526     ===========================================================================
527     Member Data Declarations
528     ===========================================================================
529     */
530     private:
531 
532         /**
533          * \brief The user defined model to be passed to the constructor of the
534          *        Evaluator.
535          */
536         Model& _theModel;
537 
538     /*
539     ===========================================================================
540     Subclass Overridable Methods
541     ===========================================================================
542     */
543     public:
544 
        /// Overridden to return a newly created Evaluator.
546         /**
547          * The GA will assume ownership of the evaluator so we needn't worry
548          * about keeping track of it for destruction.  The additional
549          * parameters needed by the Evaluator are stored as members of this
550          * class at construction time.
551          *
552          * \param alg The GA for which the evaluator is to be created.
553          * \return A pointer to a newly created Evaluator.
554          */
555         virtual
556         GeneticAlgorithmEvaluator*
        CreateEvaluator(
558             GeneticAlgorithm& alg
559             )
560         {
561             EDDY_FUNC_DEBUGSCOPE
            return new Evaluator(alg, _theModel);
563         }
564 
565     /*
566     ===========================================================================
567     Structors
568     ===========================================================================
569     */
570     public:
571 
572         /**
573          * \brief Constructs an EvaluatorCreator using the supplied model.
574          *
575          * \param theModel The Dakota::Model this creator will pass to the
576          *                 created evaluator.
577          */
        EvaluatorCreator(
579             Model& theModel
580             ) :
581                 _theModel(theModel)
582         {
583             EDDY_FUNC_DEBUGSCOPE
584         }
585 
586 }; // class JEGAOptimizer::EvaluatorCreator
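
// A minimal usage sketch for this creator (the names aConfig and paramDB are
// illustrative; core_run below shows the real wiring):
//
//     JEGAOptimizer::EvaluatorCreator creator(theModel);
//     JEGA::FrontEnd::AlgorithmConfig aConfig(creator, paramDB);
//     // When the GA is built, the front end calls creator.CreateEvaluator(ga)
//     // and the GA assumes ownership of the returned evaluator.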
587 
588 /**
589  * \brief A subclass of the JEGA front end driver that exposes the
590  *        individual protected methods to execute the algorithm.
591  *
 * This is necessary because DAKOTA requires that all problem
 * information be extracted from the problem description DB at the
 * time of Optimizer construction, whereas the front end does it all
 * in the ExecuteAlgorithm method, which must be called in core_run.
596  */
597 class JEGAOptimizer::Driver :
598     public JEGA::FrontEnd::Driver
599 {
600     /*
601     ===========================================================================
602     Member Data Declarations
603     ===========================================================================
604     */
605     private:
606 
607     /*
608     ===========================================================================
609     Public Methods
610     ===========================================================================
611     */
612     public:
613 
614         /**
615          * \brief Reads all required data from the problem description database
616          *        stored in the supplied algorithm config.
617          *
618          * The returned GA is fully configured and ready to be run.  It must
619          * also be destroyed at some later time.  You MUST call
620          * DestroyAlgorithm for this purpose.  Failure to do so could result
621          * in a memory leak and an eventual segmentation fault!  Be sure to
622          * call DestroyAlgorithm prior to destroying the algorithm config that
623          * was used to create it!
624          *
625          * This is just here to expose the base class method to users.
626          *
627          * \param algConfig The fully loaded configuration object containing
628          *                  the database of parameters for the algorithm to be
629          *                  run on the known problem.
630          * \return The fully configured and loaded GA ready to be run using
631          *         the PerformIterations method.
632          */
633         GeneticAlgorithm*
        ExtractAllData(
635             const AlgorithmConfig& algConfig
636             )
637         {
638             return JEGA::FrontEnd::Driver::ExtractAllData(algConfig);
639         }
640 
641         /**
642          * \brief Performs the required iterations on the supplied GA.
643          *
644          * This includes the calls to AlgorithmInitialize and AlgorithmFinalize
645          * and logs some information if appropriate.
646          *
647          * This is just here to expose the base class method to users.
648          *
649          * \param theGA The GA on which to perform iterations.  This parameter
650          *              must be non-null.
651          * \return The final solutions reported by the supplied GA after all
652          *         iterations and call to AlgorithmFinalize.
653          */
654         DesignOFSortSet
        PerformIterations(
656             GeneticAlgorithm* theGA
657             )
658         {
659             return JEGA::FrontEnd::Driver::PerformIterations(theGA);
660         }
661 
662         /**
663          * \brief Deletes the supplied GA.
664          *
665          * Use this method to destroy a GA after all iterations have been run.
666          * This method knows if the log associated with the GA was created here
667          * and needs to be destroyed as well or not.
668          *
669          * This is just here to expose the base class method to users.
670          *
         * Be sure to use this prior to destroying the algorithm config object
         * which contains the target.  The GA destructor needs the target to
         * be intact.
674          *
675          * \param theGA The algorithm that is no longer needed and thus must be
676          *              destroyed.
677          */
678         void
        DestroyAlgorithm(
680             GeneticAlgorithm* theGA
681             )
682         {
683             JEGA::FrontEnd::Driver::DestroyAlgorithm(theGA);
684         }
685 
686     /*
687     ===========================================================================
688     Structors
689     ===========================================================================
690     */
691     public:
692 
693         /// Default constructs a Driver
694         /**
695          * \param probConfig The definition of the problem to be solved by this
696          *                   Driver whenever ExecuteAlgorithm is called.
697          *
698          * The problem can be solved in multiple ways by multiple algorithms
699          * even using multiple different evaluators by issuing multiple calls
700          * to ExecuteAlgorithm with different AlgorithmConfigs.
701          */
        Driver(
703             const ProblemConfig& probConfig
704             ) :
705                 JEGA::FrontEnd::Driver(probConfig)
706         {
707             EDDY_FUNC_DEBUGSCOPE
708         }
709 
710 }; // class JEGAOptimizer::Driver
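
// A sketch of the lifecycle this Driver subclass exposes; it is the same
// sequence used by core_run below:
//
//     JEGAOptimizer::Driver driver(pConfig);
//     GeneticAlgorithm* ga = driver.ExtractAllData(aConfig);
//     DesignOFSortSet bests(driver.PerformIterations(ga));
//     // ... harvest the solutions in "bests" ...
//     bests.flush();
//     driver.DestroyAlgorithm(ga);   // before aConfig and pConfig go away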
711 
712 
713 /*
714 ===============================================================================
715 Static Member Data Definitions
716 ===============================================================================
717 */
718 
719 
720 
721 
722 
723 /*
724 ===============================================================================
725 Mutators
726 ===============================================================================
727 */
728 
729 
730 
731 
732 
733 
734 
735 
736 /*
737 ===============================================================================
738 Accessors
739 ===============================================================================
740 */
741 
742 
743 
744 
745 
746 /*
747 ===============================================================================
748 Public Methods
749 ===============================================================================
750 */
751 void
JEGAOptimizer::core_run(
753     )
754 {
755     EDDY_FUNC_DEBUGSCOPE
756 
757     // Load up an algorithm config and a problem config.
758     ProblemConfig pConfig;
759     LoadProblemConfig(pConfig);
760 
761     AlgorithmConfig aConfig(*this->_theEvalCreator, *this->_theParamDB);
762     this->LoadAlgorithmConfig(aConfig);
763 
    // retrieve parameter database for repeated use (this is actually _theParamDB)
765     ParameterDatabase& pdb = aConfig.GetParameterDB();
766 
767     // Create a new driver for JEGA.
768     JEGAOptimizer::Driver driver(pConfig);
769 
770     // Get the algorithm separately (rather than simply running the current
771     // configuration) in case we need to change the initializer.
772     GeneticAlgorithm* theGA = driver.ExtractAllData(aConfig);
773 
    // Get the name of the GA for repeated use below.  We need this regardless
    // of whether or not we are logging because it is used in fatal error
    // messages.
776     const string& name = theGA->GetName();
777 
    // The initializer requires some additional logic to account for the
    // possibility that JEGA is being used in a Dakota strategy.  If that is
    // the case, the _initPts array will be non-empty and we will use those
    // points instead of whatever initialization the user specified.
782     if(!this->_initPts.empty())
783     {
784         const GeneticAlgorithmInitializer& oldInit =
785             theGA->GetOperatorSet().GetInitializer();
786 
787         JEGALOG_II_G(lquiet(), this,
788             text_entry(lquiet(), name + ": discovered multiple initial "
789                 "points presumably supplied by a previous iterator in a "
790                 "strategy.  The \"" + oldInit.GetName() + "\" initializer "
791                 "will not be used and instead will be replaced with the "
792                 "double_matrix initializer which will read the supplied "
793                 "initial points."
794                 )
795             )
796 
797         pdb.AddIntegralParam(
798             "method.population_size", static_cast<int>(oldInit.GetSize())
799             );
800 
801         pdb.AddDoubleMatrixParam(
802             "method.jega.design_matrix", ToDoubleMatrix(initial_points())
803             );
804 
805         GeneticAlgorithmInitializer* newInit =
806             AllOperators::FullInstance().GetInitializer(
807                 "double_matrix", *theGA
808                 );
809 
810         JEGAIFLOG_II_G_F(newInit == 0x0, this,
811             text_entry(lfatal(), name + ": Unable to resolve "
812                 "Initializer \"double_matrix\".")
813             );
814 
815         JEGAIFLOG_II_F(!theGA->SetInitializer(newInit),
816             theGA->GetLogger(), this,
817             text_entry(lfatal(), name + ": Unable to set the initializer to "
818                 "double_matrix because it is incompatible with the other "
819                 "operators."
820                 )
821             )
822 
823         JEGAIFLOG_II_F(
824             !newInit->ExtractParameters(pdb), theGA->GetLogger(), this,
825             text_entry(lfatal(),
826                 name + ": Failed to retrieve the parameters for \"" +
827                 newInit->GetName() + "\".")
828             );
829 
830     }
831 
832     JEGALOG_II_G(lverbose(), this,
833         text_entry(lverbose(),
834             name + ": About to perform algorithm execution.")
835             )
836 
837     DesignOFSortSet bests(driver.PerformIterations(theGA));
838 
839     JEGALOG_II_G(lverbose(), this,
840         ostream_entry(lverbose(), name + ": algorithm execution completed. ")
841             << bests.size() << " solutions found. Passing them back to DAKOTA."
842         )
843 
844     // Return up to numBest solutions to DAKOTA, sorted first by L2
845     // constraint violation, then (utopia distance or weighted sum
846     // objective value).  So the single "best" will be at the front.
847     //
848     // Load up to numBest solutions into the arrays of best responses
849     // and variables.  If this is MOGA, the array will then contain
850     // the Pareto set.  If it is SOGA, it will contain all the
851     // solutions with the same best "fitness".
852 
853     // populate the sorted map of best solutions (fairly lightweight
854     // map) key is pair<constraintViolation, fitness>, where fitness
855     // is either utopia distance (MOGA) or objective value (SOGA)
856     std::multimap<RealRealPair, Design*> designSortMap;
857     this->GetBestSolutions(bests, *theGA, designSortMap);
858 
859     JEGAIFLOG_II_G(designSortMap.size() == 0, lquiet(), this,
860         text_entry(lquiet(), name + ": was unable to identify at least one "
861             "best solution.  The Dakota best variables and best responses "
862             "objects will be empty.\n\n")
863         )
864 
865     // load the map into the DAKOTA vectors
866     resize_best_resp_array(designSortMap.size());
867     resize_best_vars_array(designSortMap.size());
868 
869     std::multimap<RealRealPair, Design*>::const_iterator best_it =
870         designSortMap.begin();
871     const std::multimap<RealRealPair, Design*>::const_iterator best_end =
872         designSortMap.end();
873     ResponseArray::size_type index = 0;
874     for( ; best_it != best_end; ++best_it, ++index)
875     {
876         this->LoadDakotaResponses(
877         *(best_it->second),
878             this->bestVariablesArray[index],
879             this->bestResponseArray[index]
880             );
881     }
882 
883     // now we are done with our solution set so we can flush it
884     // per Driver rules.
885     bests.flush();
886 
887     JEGALOG_II_G(lquiet(), this,
888         text_entry(lquiet(), name + ": find optimum completed and all "
889             "results have been passed back to DAKOTA.\n\n")
890         )
891 
    // We can now destroy our GA.
893     driver.DestroyAlgorithm(theGA);
894 }
895 
896 bool
JEGAOptimizer::accepts_multiple_points(
898     ) const
899 {
900     return true;
901 }
902 
903 bool
JEGAOptimizer::returns_multiple_points(
905     ) const
906 {
907     return true;
908 }
909 
910 void
JEGAOptimizer::initial_points(
912     const VariablesArray& pts
913     )
914 {
915     this->_initPts = pts;
916 }
917 
918 const VariablesArray&
JEGAOptimizer::initial_points(
920     ) const
921 {
922     return this->_initPts;
923 }
924 
925 
926 /*
927 ===============================================================================
928 Subclass Visible Methods
929 ===============================================================================
930 */
931 
932 void
JEGAOptimizer::LoadDakotaResponses(
934     const JEGA::Utilities::Design& des,
935     Dakota::Variables& vars,
936     Dakota::Response& resp
937     ) const
938 {
939     RealVector c_vars(this->numContinuousVars);
940     IntVector  di_vars(this->numDiscreteIntVars);
941     RealVector dr_vars(this->numDiscreteRealVars);
942 
943 //PDH: JEGA variables to Dakota variables.
944 //     Don't know what the JEGA data structure is.  These are all
945 //     mapped one entry at a time.
946 
947     // The first numContinuousVars of a design will be all the continuous
948     // variables of the problem (see LoadTheDesignVariables).
949     for(size_t i=0; i<this->numContinuousVars; ++i)
950         c_vars[i] = des.GetVariableValue(i);
951 
952     // The next set of variables represent the discrete integer variables.
953     for(size_t i=0; i<this->numDiscreteIntVars; ++i)
954         di_vars[i] = static_cast<int>(
955             des.GetVariableValue(i + this->numContinuousVars));
956 
957     // The next set of variables represent the discrete real variables.
958     for(size_t i=0; i<this->numDiscreteRealVars; ++i)
959         dr_vars[i] = des.GetVariableValue(
960             i + this->numContinuousVars + this->numDiscreteIntVars);
961 
    // Finally, set the discrete string vars. These are mapped to discrete
    // integers in JEGA, so they must be unmapped. Here, they are also set in
    // vars using the single value setter to avoid creating a
    // StringMultiArrayConstView.
966 
967 //PDH: JEGA variables to Dakota variables.
968 //     Don't know what the JEGA data structure is.  These are all
969 //     mapped one entry at a time.
970 //     String variables also need to be remapped.
971 
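    // Illustration (hypothetical set): if the i-th discrete string variable
    // was defined over {"alpha", "beta", "gamma"}, JEGA stores an index in
    // {0, 1, 2}; set_index_to_value(2, ...) below unmaps that index back to
    // "gamma" before it is stored in vars.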
972     const Pecos::MultivariateDistribution& mv_dist
973       = iteratedModel.multivariate_distribution();
974     std::shared_ptr<Pecos::MarginalsCorrDistribution> mvd_dist_rep =
975       std::static_pointer_cast<Pecos::MarginalsCorrDistribution>
976       (mv_dist.multivar_dist_rep());
977     StringSetArray ddss_values;
978     mvd_dist_rep->
979       pull_parameters(this->numContinuousVars+this->numDiscreteIntVars,
980 		      this->numDiscreteStringVars, Pecos::DSS_VALUES,
981 		      ddss_values);
982     for(size_t i=0; i<this->numDiscreteStringVars; ++i) {
983       const int &element_index = static_cast<int>(des.GetVariableValue(i +
984 	    this->numContinuousVars + this->numDiscreteIntVars +
985 	    this->numDiscreteRealVars));
986       const String &ds_var = set_index_to_value(element_index, ddss_values[i]);
987       vars.discrete_string_variable(ds_var, i);
988     }
989     vars.continuous_variables(c_vars);
990     vars.discrete_int_variables(di_vars);
991     vars.discrete_real_variables(dr_vars);
992 
993 //PDH: JEGA responses to Dakota responses.
994 //     Don't know what the JEGA data structure is.  These are all
995 //     mapped one entry at a time.
996 //     Need to respect constraint ordering.
997 
998     // BMA TODO: Could always populate constraints and just get
999     // primary responses from the DB, as in SNLL
1000     if (!localObjectiveRecast) {  // else local_recast_retrieve
1001       RealVector fn_vals(this->numFunctions);
      for(size_t i=0; i<this->numObjectiveFns; i++)
        fn_vals[i] = des.GetObjective(i);
1004 
1005       // JEGA constraint ordering is nonlinear inequality, nonlinear equality,
1006       // linear inequality, linear equality
1007       // (see JEGAOptimizer::LoadTheConstraints()).
1008       for(size_t i=0; i<static_cast<size_t>(this->numNonlinearConstraints); ++i)
1009         fn_vals[i+this->numObjectiveFns] = des.GetConstraint(i);
1010 
1011       resp.function_values(fn_vals);
1012     }
1013 }
1014 
1015 void
JEGAOptimizer::ReCreateTheParameterDatabase(
1017     )
1018 {
1019     EDDY_FUNC_DEBUGSCOPE
1020     delete this->_theParamDB;
1021     this->_theParamDB = new BasicParameterDatabaseImpl();
1022 }
1023 
1024 void
JEGAOptimizer::LoadTheParameterDatabase(
1026     )
1027 {
1028     EDDY_FUNC_DEBUGSCOPE
1029 
1030     this->ReCreateTheParameterDatabase();
1031 
1032     // Duplicate in all the integral parameters.
1033     const int& random_seed = probDescDB.get_int("method.random_seed");
1034     if (random_seed != 0)
1035       this->_theParamDB->AddIntegralParam(
1036 	  "method.random_seed", random_seed
1037         );
1038     else
1039       this->_theParamDB->AddIntegralParam(
1040 	  "method.random_seed", -1
1041         );
1042 
1043     // now all the reals
1044     const Real& constraint_penalty = probDescDB.get_real("method.constraint_penalty");
1045     if (constraint_penalty >= 0.)
1046       this->_theParamDB->AddDoubleParam(
1047 	  "method.constraint_penalty", constraint_penalty
1048         );
1049     else
1050        this->_theParamDB->AddDoubleParam(
1051 	   "method.constraint_penalty", 1.0
1052         );
1053     const Real& crossover_rate = probDescDB.get_real("method.crossover_rate");
1054     if (crossover_rate >= 0.)
1055       this->_theParamDB->AddDoubleParam(
1056           "method.crossover_rate", crossover_rate
1057         );
1058     else
1059       this->_theParamDB->AddDoubleParam(
1060           "method.crossover_rate", 0.75
1061         );
1062     if (this->probDescDB.get_string("method.mutation_type") != "")
1063       this->_theParamDB->AddDoubleParam(
1064 	  "method.mutation_rate",
1065 	  probDescDB.get_real("method.mutation_rate")
1066 	  );
1067     else
1068       this->_theParamDB->AddDoubleParam(
1069 	  "method.mutation_rate", 0.1
1070 	  );
1071     this->_theParamDB->AddDoubleParam(
1072         "method.mutation_scale",
1073         probDescDB.get_real("method.mutation_scale")
1074         );
1075     double perc_change = probDescDB.get_real("method.jega.percent_change");
1076     this->_theParamDB->AddDoubleParam(
1077         "method.jega.percent_change",
1078 	(perc_change < 0) ? 1.0e-4 : perc_change
1079         );
1080     double conv_tol = probDescDB.get_real("method.convergence_tolerance");
1081     this->_theParamDB->AddDoubleParam(
1082         "method.convergence_tolerance",
1083 	(conv_tol < 0) ? 1.0e-4 : conv_tol
1084         );
1085     this->_theParamDB->AddDoubleParam(
1086         "method.jega.shrinkage_percentage",
1087         probDescDB.get_real("method.jega.shrinkage_percentage")
1088         );
1089     this->_theParamDB->AddDoubleParam(
1090         "method.jega.fitness_limit",
1091         probDescDB.get_real("method.jega.fitness_limit")
1092         );
1093 
1094     // now get all the size_t's
1095     this->_theParamDB->AddSizeTypeParam(
1096         "method.jega.num_cross_points",
1097         probDescDB.get_sizet("method.jega.num_cross_points")
1098         );
1099     this->_theParamDB->AddSizeTypeParam(
1100         "method.jega.num_parents",
1101         probDescDB.get_sizet("method.jega.num_parents")
1102         );
1103     this->_theParamDB->AddSizeTypeParam(
1104         "method.jega.num_offspring",
1105         probDescDB.get_sizet("method.jega.num_offspring")
1106         );
1107     this->_theParamDB->AddSizeTypeParam(
1108         "method.jega.num_generations",
1109         probDescDB.get_sizet("method.jega.num_generations")
1110         );
1111     this->_theParamDB->AddSizeTypeParam(
1112         "method.jega.max_designs",
1113         probDescDB.get_sizet("method.jega.num_designs")
1114         );
    // Note that the population size, max function evaluations, and max
    // iterations come out of the problem description DB as ints.  Do a
    // conversion to size_t for each here.
1117     this->_theParamDB->AddSizeTypeParam(
1118         "method.population_size",
1119         static_cast<size_t>(probDescDB.get_int("method.population_size"))
1120         );
1121     this->_theParamDB->AddSizeTypeParam(
1122         "method.max_iterations",
1123         static_cast<size_t>(probDescDB.get_int("method.max_iterations"))
1124         );
1125     this->_theParamDB->AddSizeTypeParam(
1126         "method.max_function_evaluations",
1127         static_cast<size_t>(
1128             probDescDB.get_int("method.max_function_evaluations")
1129             )
1130         );
1131 
1132     // Dakota does not currently expose the input that indicates that there is
1133     // a minimum allowable population size for the below limit selector.  Add
1134     // a default explicitly here to prevent any problems.
1135     this->_theParamDB->AddSizeTypeParam("method.jega.minimum_selections", 2);
1136 
    // Dakota does not currently expose the evaluation concurrency flag
    // through the interface, nor should it, because Dakota handles the
    // evaluations.
1139     this->_theParamDB->AddSizeTypeParam("method.jega.eval_concurrency", 1);
1140 
1141     // Now get all the booleans
1142     this->_theParamDB->AddBooleanParam(
1143         "method.print_each_pop",
1144         probDescDB.get_bool("method.print_each_pop")
1145         );
1146 
1147     // Dakota does not currently expose the flag to instruct the GA whether or
1148     // not to write the final data file and discards file.  Put those in here
1149     // to avoid warnings about them missing.
1150     this->_theParamDB->AddBooleanParam("method.print_final_data", true);
1151     this->_theParamDB->AddBooleanParam("method.print_discards", true);
1152 
1153     // Dakota does not currently expose the flag to instruct a nicher
1154     // as to whether or not to cache niched designs.  So put that flag in here
1155     // with a default true value.  It won't hurt anything if it is not needed.
1156     this->_theParamDB->AddBooleanParam("method.jega.cache_niched_designs", true);
1157 
1158     // now get all the strings.
1159 
    // Dakota does not expose the ability to specify the weighted_sum_only
    // fitness assessor because it is only for use with the favor_feasible
    // selector.  Likewise, the favor_feasible selector can only be used with
    // the weighted_sum_only fitness assessor.  Because of this, we detect use
    // of favor_feasible, enforce use of weighted_sum_only, and write a log
    // message about the substitution.
1167     const string& selector =
1168         this->probDescDB.get_string("method.replacement_type");
1169     if (selector != "")
1170       this->_theParamDB->AddStringParam(
1171 	  "method.replacement_type", selector);
1172     else if (this->methodName == SOGA)
1173       this->_theParamDB->AddStringParam(
1174 	  "method.replacement_type", "elitist");
1175     else if (this->methodName == MOGA)
1176       this->_theParamDB->AddStringParam(
1177 	  "method.replacement_type", "below_limit");
1178 
1179     const string& fitness = this->probDescDB.get_string("method.fitness_type");
1180     if(selector == "favor_feasible")
1181     {
1182         JEGALOG_II_G(lquiet(), this,
1183             text_entry(lquiet(),
1184             "Use of the favor_feasible selector has been detected.  Use of "
1185             "this selector type requires use of the \"weighted_sum_only\" "
1186             "fitness assessor.  Therefore, use of the \"" + fitness +
1187             "\" will be changed to use of the \"weighted_sum_only\".")
1188             )
1189 
1190         this->_theParamDB->AddStringParam(
1191             "method.fitness_type", "weighted_sum_only"
1192             );
1193     }
1194     else
1195     {
1196       if (fitness != "")
1197         this->_theParamDB->AddStringParam("method.fitness_type", fitness);
1198       else if (this->methodName == SOGA)
1199         this->_theParamDB->AddStringParam("method.fitness_type", "merit_function");
1200       else if (this->methodName == MOGA)
1201         this->_theParamDB->AddStringParam("method.fitness_type", "domination_count");
1202     }
1203 
1204     const string& crossover_operator =
1205         this->probDescDB.get_string("method.crossover_type");
1206     if (crossover_operator != "")
1207       this->_theParamDB->AddStringParam(
1208 	  "method.crossover_type", crossover_operator);
1209     else
1210       this->_theParamDB->AddStringParam(
1211 	  "method.crossover_type", "shuffle_random");
1212 
1213     const string& mutation_operator =
1214         this->probDescDB.get_string("method.mutation_type");
1215     if (mutation_operator != "")
1216       this->_theParamDB->AddStringParam(
1217 	  "method.mutation_type", mutation_operator);
1218     else
1219       this->_theParamDB->AddStringParam(
1220 	  "method.mutation_type", "replace_uniform");
1221 
    this->_theParamDB->AddIntegralParam(
1223         "method.output",
1224         this->probDescDB.get_short("method.output")
1225         );
1226     this->_theParamDB->AddStringParam(
1227         "method.initialization_type",
1228         this->probDescDB.get_string("method.initialization_type")
1229         );
1230     this->_theParamDB->AddStringParam(
1231         "method.flat_file",
1232         this->probDescDB.get_string("method.flat_file")
1233         );
1234 
1235     // Dakota does not currently expose the input that allows one to specify
1236     // the location of any data files written by JEGA.  Use the default but
1237     // specify it explicitly here to prevent any problems.
1238     this->_theParamDB->AddStringParam(
1239         "method.jega.data_directory", "./"
1240         );
1241 
1242     // Dakota does not currently expose the final data file name pattern
1243     // through the interface.
1244     this->_theParamDB->AddStringParam(
1245         "method.jega.final_data_filename", "finaldata#.dat"
1246         );
1247 
1248     // Dakota does not currently expose the main loop operator selection
1249     // through the interface.
1250     this->_theParamDB->AddStringParam(
1251         "method.jega.mainloop_type", "duplicate_free"
1252         );
1253 
    // The log file gets special attention.  If it is the default global log
    // file name, we replace it with an empty string because we don't want the
    // created GA to think it owns the global log file.
1257     string log_file = probDescDB.get_string("method.log_file");
1258     this->_theParamDB->AddStringParam(
1259         "method.log_file",
1260         log_file == "JEGAGlobal.log" ? "" : log_file
1261         );
1262     const string& convergence_operator =
1263         this->probDescDB.get_string("method.jega.convergence_type");
1264 
1265     if (convergence_operator != "")
1266       this->_theParamDB->AddStringParam(
1267 	  "method.jega.convergence_type", convergence_operator);
1268     else if (this->methodName == SOGA)
1269       this->_theParamDB->AddStringParam(
1270 	  "method.jega.convergence_type", "average_fitness_tracker");
1271     else if (this->methodName == MOGA)
1272       this->_theParamDB->AddStringParam(
1273 	  "method.jega.convergence_type", "metric_tracker");
1274 
1275     this->_theParamDB->AddStringParam(
1276         "method.jega.niching_type",
1277         this->probDescDB.get_string("method.jega.niching_type")
1278         );
1279     this->_theParamDB->AddStringParam(
1280         "method.jega.postprocessor_type",
1281         this->probDescDB.get_string("method.jega.postprocessor_type")
1282         );
1283 
    // Dakota does not expose a flat file delimiter for the case where we
    // are using the flat file initializer, but the initializer will look
    // for one if it is in use.
1287     this->_theParamDB->AddStringParam("method.jega.initializer_delimiter", "");
1288 
1289     // now get all vector of doubles.
1290     const RealVector *dak_rv
1291         = &this->probDescDB.get_rv("method.jega.niche_vector");
1292 
1293     JEGA::DoubleVector niche_vector(
1294         dak_rv->values(),
1295         dak_rv->values() + dak_rv->length()
1296         );
1297 
1298     this->_theParamDB->AddDoubleVectorParam(
1299         "method.jega.niche_vector",
1300         niche_vector
1301         );
1302 
1303     dak_rv = &this->probDescDB.get_rv("method.jega.distance_vector");
1304     JEGA::DoubleVector distance_vector(
1305         dak_rv->values(),
1306         dak_rv->values() + dak_rv->length()
1307         );
1308 
1309     this->_theParamDB->AddDoubleVectorParam(
1310         "method.jega.distance_vector",
1311         distance_vector
1312         );
1313 
1314     // when recasting is active, the weights may be transformed; get off Model
1315     dak_rv = &iteratedModel.primary_response_fn_weights();
1316     JEGA::DoubleVector mow_vector(
1317         dak_rv->values(),
1318         dak_rv->values() + dak_rv->length()
1319         );
1320 
1321     this->_theParamDB->AddDoubleVectorParam(
1322         "responses.multi_objective_weights",
1323         mow_vector
1324         );
1325 
    // Dakota does not expose a capability to enter multiple flat file names
    // as a vector (they can be delimited within the single string and JEGA
    // will parse them).  We add an empty vector to prevent a warning from
    // JEGA.
1329     this->_theParamDB->AddStringVectorParam(
1330         "method.flat_files", std::vector<std::string>()
1331         );
1332 
1333 
1334 
1335     // now get all int vector params.
1336     // nothing to do here.
1337 
1338     // now get all the double matrices.
1339     // nothing to do here.
1340 
1341     // now get all integer list params.
1342     // nothing to do here.
1343 
1344     // now get all string vectors
1345     // nothing to do here.
1346 
1347     // now get all string lists
1348     // nothing to do here.
1349 }
1350 
1351 void
JEGAOptimizer::LoadAlgorithmConfig(
1353     JEGA::FrontEnd::AlgorithmConfig& aConfig
1354     )
1355 {
1356     EDDY_FUNC_DEBUGSCOPE
1357 
1358     ParameterDatabase& pdb = aConfig.GetParameterDB();
1359 
1360     // Determine what kind of algorithm we are creating (MOGA or SOGA)
1361     // based on the methodName base class variable.
1362     AlgorithmConfig::AlgType algType;
1363 
1364     if(this->methodName == MOGA)
1365         algType = AlgorithmConfig::MOGA;
1366 
1367     else if(this->methodName == SOGA)
1368         algType = AlgorithmConfig::SOGA;
1369 
1370     else
1371         JEGALOG_II_G_F(this,
1372             text_entry(lfatal(), "JEGA Error: \"" +
1373 		       method_enum_to_string(this->methodName) +
1374 		       "\" is an invalid method specification.")
1375             )
1376 
1377     aConfig.SetAlgorithmType(algType);
1378 
1379     // We will use the method id as the algorithm name if it is non-empty and
1380     // we will otherwise use the method name.
1381     aConfig.SetAlgorithmName(
1382         this->method_id().empty() ?
1383 	  method_enum_to_string(this->methodName) : this->method_id()
1384         );
1385 }
1386 
1387 void
JEGAOptimizer::LoadProblemConfig(
1389     JEGA::FrontEnd::ProblemConfig& pConfig
1390     )
1391 {
1392     EDDY_FUNC_DEBUGSCOPE
1393 
1394     // This is carried out by loading the design variables, objective
1395     // functions, and constraints.
1396     this->LoadTheDesignVariables(pConfig);
1397     this->LoadTheObjectiveFunctions(pConfig);
1398     this->LoadTheConstraints(pConfig);
1399 }
1400 
1401 void
JEGAOptimizer::LoadTheDesignVariables(
1403     JEGA::FrontEnd::ProblemConfig& pConfig
1404     )
1405 {
1406     EDDY_FUNC_DEBUGSCOPE
1407 
1408     // The information needed to create the design variable infos
1409     // is contained in the data structures of the base classes.
1410     // In particular, the Model (iteratedModel) has most of the
1411     // info.  We will create a shorthand for it here to ease syntax.
1412     Model& m = this->iteratedModel;
1413     size_t i, j, dsi_cntr;
1414 
1415     // Loop over all continuous variables and add an info object.  Don't worry
1416     // about the precision so much.  It is only considered by the operators
1417     // that do binary encoding such as the NPointParameterizedBinaryCrosser and
1418     // the RandomBitMutator.  Other than that, it is largely ignored by this
    // implementation.  It can have a fairly profound effect on the performance
    // of those operators that use it to encode to binary.
1421 
1422 //PDH: Should be able to simplify all of this code quite a bit.  As
1423 //far as I can tell, the JEGA vectors are all just std vectors of the
1424 //corresponding type.  Not sure what exactly pConfig is, though.
1425 
1426     const RealVector& clbs = m.continuous_lower_bounds();
1427     const RealVector& cubs = m.continuous_upper_bounds();
1428     StringMultiArrayConstView clabels = m.continuous_variable_labels();
1429     for(i=0; i<this->numContinuousVars; ++i)
1430       pConfig.AddContinuumRealVariable(clabels[i], clbs[i], cubs[i], 6);
1431 
1432     // now move on to the discrete variables.  The data for those is in the
1433     // Model as discrete_lower_bounds, discrete_upper_bounds, and
1434     // discrete_variable_labels.
1435     const IntVector& dilbs = m.discrete_int_lower_bounds();
1436     const IntVector& diubs = m.discrete_int_upper_bounds();
1437     StringMultiArrayConstView dilabels = m.discrete_int_variable_labels();
1438     const BitArray& di_set_bits = m.discrete_int_sets();
1439     const IntSetArray& dsiv = m.discrete_set_int_values();
1440     for(i=0, dsi_cntr=0; i<this->numDiscreteIntVars; ++i)
1441     {
1442       if (di_set_bits[i]) { // discrete set variables
1443         const IntSet& dak_set = dsiv[dsi_cntr];
1444         pConfig.AddDiscreteIntegerVariable(dilabels[i],
1445 	  JEGA::IntVector(dak_set.begin(), dak_set.end()) );
1446 	++dsi_cntr;
1447       }
1448       else // discrete range variables
1449         pConfig.AddContinuumIntegerVariable(dilabels[i], dilbs[i], diubs[i]);
1450     }
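    // For illustration (hypothetical values): a discrete range variable with
    // bounds [1, 5] is presented to JEGA as a continuum integer variable
    // spanning 1..5, whereas a set variable with admissible values {2, 4, 8}
    // becomes a discrete integer variable holding exactly those values via the
    // JEGA::IntVector built above.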
1451 
1452     // Next, load in the "discrete set of real" variables.
1453     const RealSetArray& dsrv = m.discrete_set_real_values();
1454     StringMultiArrayConstView drlabels = m.discrete_real_variable_labels();
1455     for(i=0; i<this->numDiscreteRealVars; ++i)
1456     {
1457       const RealSet& dak_set = dsrv[i];
1458       pConfig.AddDiscreteRealVariable(drlabels[i],
1459 	JEGA::DoubleVector(dak_set.begin(), dak_set.end()) );
1460     }
1461 
1462 //PDH: Have to map the string variables to indices.
1463 
1464     // Finally, load in the "discrete set of string" variables. These must
1465     // be mapped to discrete integer variables.
1466     StringMultiArrayConstView dslabels = m.discrete_string_variable_labels();
1467     const StringSetArray& dssv_values = m.discrete_set_string_values();
1468     for (i=0; i<this->numDiscreteStringVars; ++i) {
1469       const size_t &num_elements = dssv_values[i].size(); //assume > 0
1470       IntArray element_index(num_elements);
1471       for (j=0; j<num_elements; ++j)
1472 	element_index[j] = j;
1473       pConfig.AddDiscreteIntegerVariable(dslabels[i],
1474 	            JEGA::IntVector(element_index.begin(),
1475 		      element_index.end()) );
1476     }
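    // Note: each string set of size N is presented to JEGA as the integer
    // index set {0, 1, ..., N-1}; Evaluator::SeparateVariables() later maps a
    // chosen index back to its string value via set_index_to_value().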
1477 
1478     // Now make sure that an info was created for each variable.
1479     EDDY_ASSERT(pConfig.GetDesignTarget().GetNDV() == (this->numContinuousVars +
1480 		this->numDiscreteIntVars + this->numDiscreteRealVars +
1481 		this->numDiscreteStringVars));
1482 }
1483 
1484 void
1485 JEGAOptimizer::LoadTheObjectiveFunctions(
1486     JEGA::FrontEnd::ProblemConfig& pConfig
1487     )
1488 {
1489     EDDY_FUNC_DEBUGSCOPE
1490 
1491     // Each objective is added as minimize or maximize according to the
1492     // Model's primary response sense (minimize by default).
1493     // Dakota does not support labeling objectives.  Until it does,
1494     // we will create a label that looks like "Nature Type Index".
1495     const StringArray&  labels = iteratedModel.response_labels();
1496     const BoolDeque& max_sense = iteratedModel.primary_response_fn_sense();
1497     bool use_sense = !max_sense.empty();
1498     for(size_t i=0; i<this->numObjectiveFns; ++i)
1499       if (use_sense && max_sense[i])
1500         pConfig.AddNonlinearMaximizeObjective(
1501 	    "Non-Linear Maximize " + labels[i]
1502             );
1503       else
1504         pConfig.AddNonlinearMinimizeObjective(
1505             "Non-Linear Minimize " + labels[i]
1506             );
1507 
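    // For example (hypothetical label): a response labeled "f1" with the
    // default (minimize) sense is registered as "Non-Linear Minimize f1",
    // while one flagged as maximize becomes "Non-Linear Maximize f1".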
1508     // see to it that the numbers match up.
1509     EDDY_ASSERT(pConfig.GetDesignTarget().GetNOF() == this->numObjectiveFns);
1510 }
1511 
1512 void
1513 JEGAOptimizer::LoadTheConstraints(
1514     JEGA::FrontEnd::ProblemConfig& pConfig
1515     )
1516 {
1517     EDDY_FUNC_DEBUGSCOPE
1518 
1519     // The information needed to create the constraint infos
1520     // is contained in the data structures of the base classes.
1521     // In particular, the Model (iteratedModel) has most of the
1522     // info.  We will create a shorthand for it here to ease syntax.
1523     const Model& m = this->iteratedModel;
1524 
1525     /**************************************************************************
1526 
1527     Note the order in which these are created.  Do not change this order.  It
1528     is this way because of the order in which responses are returned out of the
1529     Model.  Technically, it only involves the first two blocks which create the
1530     non-linear constraints.  But don't mess with any of it anyway.
1531 
1532     **************************************************************************/
1533 
1534 
1535     // Start with non-linear (2-sided) inequality constraints.
1536     // The information we need for these is in
1537     // nonlinear_ineq_constraint_lower_bounds and
1538     // nonlinear_ineq_constraint_upper_bounds.  As with objective
1539     // functions, Dakota does not allow labeling of constraints, so
1540     // we will create a label that looks like "Nature Type Index".
1541     const RealVector& nln_ineq_lwr_bnds
1542         = m.nonlinear_ineq_constraint_lower_bounds();
1543     const RealVector& nln_ineq_upr_bnds
1544         = m.nonlinear_ineq_constraint_upper_bounds();
1545 
1546 //PDH: Dakota nonlinear constraints to JEGA nonlinear constraints.
1547 //     Don't know what the JEGA data structure is.  These are all
1548 //     mapped one entry at a time.
1549 //     Looks like we don't have to worry about (JEGA) order.  Need to
1550 //     determine if they have to be two-sided for JEGA.
1551 
1552     // Loop over all two-sided non-linear inequality constraints and add an
1553     // info object for each.
1554     for(size_t i=0; i<this->numNonlinearIneqConstraints; ++i)
1555         pConfig.AddNonlinearTwoSidedInequalityConstraint(
1556             "Non-Linear Two-Sided Inequality " + asstring(i),
1557             nln_ineq_lwr_bnds[i], nln_ineq_upr_bnds[i]
1558             );
1559 
1560     // now do non-linear equality constraints.  The information we need for
1561     // these is in nonlinear_eq_constraint_targets.
1562     const RealVector& nln_eq_targets = m.nonlinear_eq_constraint_targets();
1563     for(size_t i=0; i<this->numNonlinearEqConstraints; ++i)
1564         pConfig.AddNonlinearEqualityConstraint(
1565             "Non-Linear Equality " + asstring(i), nln_eq_targets[i]
1566             );
1567 
1568 //PDH: Dakota linear constraints to JEGA linear constraints.
1569 //     Don't know what the JEGA data structure is.  These are all
1570 //     mapped one entry at a time.
1571 //     Looks like we don't have to worry about (JEGA) order.  Need to
1572 //     determine if they have to be two-sided for JEGA.
1573 
1574     // Now do linear (2-sided) inequality constraints.  The information we need
1575     // for these is in linear_ineq_constraint_lower_bounds and
1576     // linear_ineq_constraint_upper_bounds.
1577     // In addition to bounds, these accept coefficients for possible shortcut
1578     // evaluation.  That information is in linear_ineq_constraint_coeffs.
1579     const RealVector& lin_ineq_lwr_bnds
1580         = m.linear_ineq_constraint_lower_bounds();
1581     const RealVector& lin_ineq_upr_bnds
1582         = m.linear_ineq_constraint_upper_bounds();
1583     const RealMatrix& lin_ineq_coeffs
1584         = m.linear_ineq_constraint_coeffs();
1585 
1586     JEGA::DoubleVector lin_ineq_coeffs_row(lin_ineq_coeffs.numCols());
1587 
1588 //PDH: RealMatrix -> set of std::vector
1589 //     Just need the individual rows.  Check copy_row_vector to see if
1590 //     transpose is also needed.
1591 
1592     for(size_t i=0; i<numLinearIneqConstraints; ++i) {
1593         copy_row_vector(lin_ineq_coeffs, i, lin_ineq_coeffs_row);
1594 
1595         pConfig.AddLinearTwoSidedInequalityConstraint(
1596             "Linear Two-Sided Inequality " + asstring(i),
1597             lin_ineq_lwr_bnds[i], lin_ineq_upr_bnds[i],
1598             lin_ineq_coeffs_row
1599             );
1600     }
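    // Illustrative sketch (hypothetical numbers): a linear constraint
    // -1 <= 0.5*x1 + 2.0*x2 <= 1 is registered with lower bound -1, upper
    // bound 1, and the coefficient row {0.5, 2.0} copied from the
    // corresponding row of linear_ineq_constraint_coeffs.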
1601 
1602     // Now do linear equality constraints.  The information we need for these
1603     // is in linear_eq_constraint_targets.  In addition to targets, these
1604     // accept coefficients for possible shortcut evaluation.  That information
1605     // is in linear_eq_constraint_coeffs.
1606     const RealVector& lin_eq_targets = m.linear_eq_constraint_targets();
1607     const RealMatrix& lin_eq_coeffs = m.linear_eq_constraint_coeffs();
1608 
1609     JEGA::DoubleVector lin_eq_coeffs_row(lin_eq_coeffs.numCols());
1610 
1611 //PDH: RealMatrix -> set of std::vector
1612 //     Just need the individual rows.  Check copy_row_vector to see if
1613 //     transpose is also needed.
1614 
1615     for(size_t i=0; i<numLinearEqConstraints; ++i) {
1616         copy_row_vector(lin_eq_coeffs, i, lin_eq_coeffs_row);
1617 
1618         pConfig.AddLinearEqualityConstraint(
1619             "Linear Equality " + asstring(i),
1620             lin_eq_targets[i], 0.0, lin_eq_coeffs_row
1621             );
1622     }
1623 
1624     // see to it that the numbers match up.
1625     EDDY_ASSERT(pConfig.GetDesignTarget().GetNCN() ==
1626         (this->numNonlinearIneqConstraints + this->numLinearIneqConstraints +
1627         this->numNonlinearEqConstraints + this->numLinearEqConstraints));
1628 }
1629 
1630 void
1631 JEGAOptimizer::GetBestSolutions(
1632     const JEGA::Utilities::DesignOFSortSet& from,
1633     const JEGA::Algorithms::GeneticAlgorithm& theGA,
1634     std::multimap<RealRealPair, JEGA::Utilities::Design*>& designSortMap
1635     )
1636 {
1637     EDDY_FUNC_DEBUGSCOPE
1638 
1639     if(this->methodName == MOGA)
1640         this->GetBestMOSolutions(from, theGA, designSortMap);
1641 
1642     else if(this->methodName == SOGA)
1643         this->GetBestSOSolutions(from, theGA, designSortMap);
1644 
1645     else
1646     {
1647         JEGALOG_II_G_F(this,
1648             text_entry(lfatal(), "JEGA Error: \"" +
1649 		       method_enum_to_string(this->methodName) +
1650 		       "\" is an invalid method specification.")
1651             )
1652     }
1653 }
1654 
1655 
1656 void
1657 JEGAOptimizer::GetBestMOSolutions(
1658     const JEGA::Utilities::DesignOFSortSet& from,
1659     const JEGA::Algorithms::GeneticAlgorithm& theGA,
1660     std::multimap<RealRealPair, JEGA::Utilities::Design*>& designSortMap
1661     )
1662 {
1663     EDDY_FUNC_DEBUGSCOPE
1664 
1665     if(from.empty())
1666         return;
1667 
1668     // Start by removing any infeasible designs from "from".
1669     DesignOFSortSet feasible(DesignStatistician::GetFeasible(from));
1670 
1671     // We need the extremes of the feasible solutions; if no feasible
1672     // designs, this will be empty and unused below.
1673     DoubleExtremes extremeSet(
1674         MultiObjectiveStatistician::FindParetoExtremes(feasible)
1675         );
1676 
1677     const DesignTarget& target = from.front()->GetDesignTarget();
1678     const ConstraintInfoVector& cnInfos = target.GetConstraintInfos();
1679 
1680     // get number of objective functions
1681     const eddy::utilities::uint64_t nof = target.GetNOF();
1682 
1683     // get total number of constraints (nonlinear and linear)
1684     const eddy::utilities::uint64_t noc = target.GetNCN();
1685 
1686     // iterate the designs and sort first by constraint violation,
1687     // then objective function
1688     DesignOFSortSet::const_iterator design_it(from.begin());
1689     const DesignOFSortSet::const_iterator design_end(from.end());
1690 
1691     for(; design_it != design_end; ++design_it)
1692     {
1693         // L2 constraint violation for this design
1694         double constraintViolation = 0.0;
1695 
1696         for(size_t i=0; i<noc; ++i)
1697             constraintViolation +=
1698                 Math::Pow(cnInfos[i]->GetViolationAmount(**design_it), 2);
1699 
1700         // sum-of-squared distance for this Design over objective functions.
1701         double utopiaDistance = 0.0;
1702         if(constraintViolation > 0.0)
1703             utopiaDistance = DBL_MAX;
1704         else
1705         {
1706             // compute the sum-of-squares distance between the current point
1707             // and the utopia point; if the feasible set is empty,
1708             // we should never reach this block.
1709             for(size_t i=0; i<nof; ++i)
1710                 utopiaDistance += Math::Pow(
1711                     (*design_it)->GetObjective(i) - extremeSet.get_min(i), 2
1712                     );
1713         }
1714 
1715         // insert the design into the map, keeping only the numFinalSolutions best
1716         RealRealPair metrics(constraintViolation, utopiaDistance);
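        // Assuming RealRealPair compares lexicographically (violation first,
        // then utopia distance), the multimap below retains the
        // numFinalSolutions designs with the smallest such pairs.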
1717 
1718         if(designSortMap.size() < this->numFinalSolutions)
1719             designSortMap.insert(std::make_pair(metrics, *design_it));
1720         else
1721         {
1722             // if this Design is better than the worst, remove worst
1723             // and insert this one
1724             std::multimap<RealRealPair, Design*>::iterator worst_it =
1725                 --designSortMap.end();
1726 
1727             if(metrics < worst_it->first)
1728             {
1729                 designSortMap.erase(worst_it);
1730                 designSortMap.insert(std::make_pair(metrics, *design_it));
1731             }
1732         }
1733     }
1734 }
1735 
1736 
1737 void
1738 JEGAOptimizer::GetBestSOSolutions(
1739     const JEGA::Utilities::DesignOFSortSet& from,
1740     const JEGA::Algorithms::GeneticAlgorithm& theGA,
1741     std::multimap<RealRealPair, JEGA::Utilities::Design*>& designSortMap
1742     )
1743 {
1744     EDDY_FUNC_DEBUGSCOPE
1745 
1746     if(from.empty()) return;
1747 
1748     const DesignTarget& target = from.front()->GetDesignTarget();
1749     const ConstraintInfoVector& cnInfos = target.GetConstraintInfos();
1750 
1751     // get number of objective functions
1752     const eddy::utilities::uint64_t nof = target.GetNOF();
1753 
1754     // get total number of constraints (nonlinear and linear)
1755     const eddy::utilities::uint64_t noc = target.GetNCN();
1756 
1757     // in order to order the points, need the weights; get them from
1758     // the GA to ensure solver/final results consistency
1759     JEGA::DoubleVector weights;
1760     try
1761     {
1762         const JEGA::Algorithms::SOGA&
1763 	    the_ga = dynamic_cast<const JEGA::Algorithms::SOGA&>(theGA);
1764 	weights = the_ga.GetWeights();
1765     }
1766     catch(const std::bad_cast& bc_except)
1767     {
1768       Cerr << "\nError: could not cast GeneticAlgorithm to SOGA; exception:\n"
1769 	   << bc_except.what() << std::endl;
1770       abort_handler(-1);
1771     }
1772 
1773     // iterate the designs and sort first by constraint violation,
1774     // then objective function
1775     DesignOFSortSet::const_iterator design_it(from.begin());
1776     const DesignOFSortSet::const_iterator design_end(from.end());
1777 
1778     for(; design_it != design_end; ++design_it)
1779     {
1780         // L2 constraint violation for this design
1781         double constraintViolation = 0.0;
1782 
1783         for(size_t i=0; i<noc; ++i)
1784             constraintViolation +=
1785                 Math::Pow(cnInfos[i]->GetViolationAmount(**design_it), 2);
1786 
1787         // Weighted sum of objective functions for this Design.
1788         // In the single-objective case we can store the objective value
1789         // even if there is a constraint violation.
1790         double objectiveFunction =
1791             SingleObjectiveStatistician::ComputeWeightedSum(
1792                 **design_it, weights
1793                 );
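        // For example (hypothetical values): with weights {0.7, 0.3} and
        // objective values {f0, f1}, the weighted sum is 0.7*f0 + 0.3*f1.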
1794 
1795         // insert the design into the map, keeping only the numFinalSolutions best
1796         RealRealPair metrics(constraintViolation, objectiveFunction);
1797 
1798         if(designSortMap.size() < this->numFinalSolutions)
1799             designSortMap.insert(std::make_pair(metrics, *design_it));
1800         else
1801         {
1802             // if this Design is better than the worst, remove worst
1803             // and insert this one
1804             std::multimap<RealRealPair, Design*>::iterator worst_it =
1805                 --designSortMap.end();
1806 
1807             if(metrics < worst_it->first)
1808             {
1809                 designSortMap.erase(worst_it);
1810                 designSortMap.insert(std::make_pair(metrics, *design_it));
1811             }
1812         }
1813     }
1814 }
1815 
1816 
1817 JEGA::DoubleMatrix
1818 JEGAOptimizer::ToDoubleMatrix(
1819     const VariablesArray& variables
1820     ) const
1821 {
1822     EDDY_FUNC_DEBUGSCOPE
1823 
1824     // Prepare the resultant matrix with proper initial capacity
1825     JEGA::DoubleMatrix ret(variables.size());
1826 
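    // Each row is laid out as [continuous..., discrete integer..., discrete
    // real...], matching the ordering used in LoadTheDesignVariables; discrete
    // string variables are not included here.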
1827     // Iterate the variables objects and create entries in the new matrix
1828     size_t i = 0;
1829     for(VariablesArray::const_iterator it(variables.begin());
1830         it!=variables.end(); ++it, ++i)
1831     {
1832         // Store the continuous and discrete variables arrays for use below.
1833         const RealVector& cvs  = (*it).continuous_variables();
1834         const IntVector&  divs = (*it).discrete_int_variables();
1835         const RealVector& drvs = (*it).discrete_real_variables();
1836 
1837         // Prepare the row we are working with to hold all variable values.
1838         ret[i].reserve(cvs.length() + divs.length() + drvs.length());
1839 
1840         // Copy in first the continuous followed by the discrete values.
1841         ret[i].insert(ret[i].end(), cvs.values(), cvs.values()+cvs.length());
1842         ret[i].insert(ret[i].end(), divs.values(), divs.values()+divs.length());
1843         ret[i].insert(ret[i].end(), drvs.values(), drvs.values()+drvs.length());
1844     }
1845 
1846     return ret;
1847 }
1848 
1849 
1850 /*
1851 ===============================================================================
1852 Subclass Overridable Methods
1853 ===============================================================================
1854 */
1855 
1856 
1857 
1858 
1859 /*
1860 ===============================================================================
1861 Private Methods
1862 ===============================================================================
1863 */
1864 
1865 
1866 
1867 
1868 /*
1869 ===============================================================================
1870 Structors
1871 ===============================================================================
1872 */
1873 JEGAOptimizer::JEGAOptimizer(
1874     ProblemDescDB& problem_db, Model& model
1875     ) :
1877         Optimizer(problem_db, model, std::shared_ptr<TraitsBase>(new JEGATraits())),
1878         _theParamDB(0x0),
1879         _theEvalCreator(0x0)
1880 {
1881     EDDY_FUNC_DEBUGSCOPE
1882 
1883     // JEGAOptimizer now makes use of the JEGA front end core project to run
1884     // an algorithm.  In order to do this, it creates and loads a DesignTarget,
1885     // a ProblemConfig, and an AlgorithmConfig.
1886 
1887     // The first step is to initialize JEGA via the front end Driver
1888     // class.  The data needed is available from the problem description
1889     // database.  This should only happen once in any run of Dakota regardless
1890     // of how many JEGAOptimizers are used.
1891     if(!JEGA::FrontEnd::Driver::IsJEGAInitialized())
1892     {
1893         // The random seed must be handled separately because the sentinel
1894         // value for JEGA (0) is not the same as the sentinel value for Dakota (-1).
1895         int rseed_temp = this->probDescDB.get_int("method.random_seed");
1896 
1897         // If the rseed is negative, it is the sentinel value and we will use
1898         // the JEGA sentinel value of 0.
1899         unsigned int rSeed = (rseed_temp < 0) ? 0 :
1900             static_cast<unsigned int>(rseed_temp);
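        // For example: Dakota's unspecified seed of -1 maps to JEGA's sentinel
        // of 0 (JEGA's default seeding behavior), while an explicit positive
        // seed such as 1234 is passed through unchanged for repeatability.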
1901 
1902         // For now, we will use the output level of the first optimizer instance
1903         // as the level for the global log.  This is only potentially not ideal
1904         // in the case of strategies.  The explicit mapping below accounts for
1905         // the fact that the actual Dakota levels count upwards by increasing
1906         // amount of output while the logging library levels count downwards,
1907         // so the two must be translated to remain compatible.
1908         short dakLev = this->probDescDB.get_short("method.output");
1909         LogLevel jegaLev;
1910 
1911         switch (dakLev)
1912         {
1913             case SILENT_OUTPUT: jegaLev = lsilent(); break;
1914             case NORMAL_OUTPUT: jegaLev = lnormal(); break;
1915             case DEBUG_OUTPUT: jegaLev = ldebug(); break;
1916             case QUIET_OUTPUT: jegaLev = lquiet(); break;
1917             case VERBOSE_OUTPUT: jegaLev = lverbose(); break;
1918             default: jegaLev = ldefault();
1919         }
1920 
1921 	// We use JEGA as a library, so we want signals to propagate up to
1922 	// us rather than be ignored:
1923 	const bool jega_register_signals = false;
1924         JEGA::FrontEnd::Driver::InitializeJEGA(
1925 	    "JEGAGlobal.log", jegaLev, rSeed, JEGA::Logging::Logger::ABORT,
1926 	    jega_register_signals
1927             );
1928     }
1929 
1930     // If we failed to init, we cannot continue.
1931     JEGAIFLOG_II_G_F(!JEGA::FrontEnd::Driver::IsJEGAInitialized(), this,
1932         text_entry(lfatal(), "JEGAOptimizer Error: Unable to initialize JEGA")
1933         );
1934 
1935     // we only need to load up the parameter database at this point.
1936     this->LoadTheParameterDatabase();
1937 
1938     // population_size is extracted by JEGA in
1939     // GeneticAlgorithmInitializer::PollForParameters(), but it is
1940     // also needed here to specify the algorithmic concurrency.  Note
1941     // that the JEGA population size may grow or shrink during its
1942     // iterations, so this is only an initial estimate.
1943     int pop_size = this->probDescDB.get_int("method.population_size");
1944     this->maxEvalConcurrency *= pop_size;
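    // For example (hypothetical values): an initial population of 50 with a
    // base evaluation concurrency of 2 yields an algorithmic evaluation
    // concurrency of 100; this is only an estimate since the population may
    // grow or shrink as the algorithm iterates.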
1945 
1946     // Assign iterator-specific default for numFinalSolutions
1947     if (methodName == MOGA && !this->numFinalSolutions)
1948       this->numFinalSolutions
1949 	= std::numeric_limits<std::size_t>::max(); // moga returns all Pareto
1950 
1951     // We only ever need one EvaluatorCreator so we can create it now.
1952     this->_theEvalCreator = new EvaluatorCreator(iteratedModel);
1953 }
1954 
1955 JEGAOptimizer::~JEGAOptimizer(
1956     )
1957 {
1958     EDDY_FUNC_DEBUGSCOPE
1959 
1960     delete this->_theEvalCreator;
1961     delete this->_theParamDB;
1962 }
1963 
1964 
1965 /*
1966 ===============================================================================
1967 Inner Class Implementations
1968 ===============================================================================
1969 */
1970 void
1971 JEGAOptimizer::Evaluator::SeparateVariables(
1972     const Design& from,
1973     RealVector& intoCont,
1974     IntVector&  intoDiscInt,
1975     RealVector& intoDiscReal,
1976     StringMultiArray& intoDiscString
1977     ) const
1978 {
1979     EDDY_FUNC_DEBUGSCOPE
1980 
1981     size_t num_cv  = this->_model.cv(), num_div = this->_model.div(),
1982            num_drv = this->_model.drv(), num_dsv = this->_model.dsv();
1983 
1984     // "into" containers may not yet be sized. If not, size them.  If they are,
1985     // don't size them b/c it will be a lot of wasted effort.
1986     if(intoCont.length()     != num_cv)  intoCont.size(num_cv);
1987     if(intoDiscInt.length()  != num_div) intoDiscInt.size(num_div);
1988     if(intoDiscReal.length() != num_drv) intoDiscReal.size(num_drv);
1989     // Strings are multi_arrays, not vectors
1990     if(intoDiscString.num_elements() != num_dsv) {
1991       StringMultiArray::extent_gen extents;
1992       intoDiscString.resize(extents[num_dsv]);
1993     }
1994 
1995     // Because we cannot easily distinguish real from integral variables
1996     // (true of both continuum and discrete), we will rely on the fact that
1997     // the infos are stored in the order in which we added them which is the
1998     // order laid out in JEGAOptimizer::LoadTheDesignVariables.  We will split
1999     // them up accordingly.  We need the design variable infos to get variable
2000     // values out of the design.
2001     const DesignTarget& target = from.GetDesignTarget();
2002     const DesignVariableInfoVector& dvis = target.GetDesignVariableInfos();
2003 
2004 //PDH: I think there's something that can be done here with regard to
2005 //     mapping discrete variables, but I don't know what the JEGA data
2006 //     structures are at the moment.
2007 
2008     // We will be marching through the dvis and need to keep track of where we
2009     // are from loop to loop.
2010     size_t i, dvi_cntr = 0;
2011 
2012     // Start with the DAKOTA continuous variables.
2013     for(i=0; i<num_cv; ++i, ++dvi_cntr)
2014     {
2015       EDDY_ASSERT(dvis[dvi_cntr]->IsContinuum());
2016       intoCont[i] = dvis[dvi_cntr]->WhichValue(from);
2017     }
2018 
2019     // Move on to the DAKOTA discrete integer {range,set} variables.
2020     const BitArray& di_set_bits = this->_model.discrete_int_sets();
2021     for(i=0; i<num_div; ++i, ++dvi_cntr)
2022     {
2023       if (di_set_bits[i]) { // set variables are of discrete nature in JEGA
2024         EDDY_ASSERT(dvis[dvi_cntr]->IsDiscrete());
2025       }
2026       else { // range variables are of continuum nature in JEGA
2027         EDDY_ASSERT(dvis[dvi_cntr]->IsContinuum());
2028       }
2029       intoDiscInt[i] = static_cast<int>(dvis[dvi_cntr]->WhichValue(from));
2030     }
2031 
2032     // Next process the "discrete set of real" variables.
2033     // These will also be of discrete nature in JEGA.
2034     for(i=0; i<num_drv; ++i, ++dvi_cntr)
2035     {
2036       EDDY_ASSERT(dvis[dvi_cntr]->IsDiscrete());
2037       intoDiscReal[i] = dvis[dvi_cntr]->WhichValue(from);
2038     }
2039     // Finally, process the "discrete set of string" variables.
2040     // These will also be discrete in JEGA, and must be mapped
2041     // back to their associated string values.
2042     const StringSetArray& dssv_values = _model.discrete_set_string_values();
2043     for(i=0; i<num_dsv; ++i, ++dvi_cntr)
2044     {
2045       EDDY_ASSERT(dvis[dvi_cntr]->IsDiscrete());
2046       const int &element_index = static_cast<int>(dvis[dvi_cntr]->WhichValue(from));
2047       intoDiscString[i] = set_index_to_value(element_index, dssv_values[i]);
2048     }
2049 }
2050 
2051 void
2052 JEGAOptimizer::Evaluator::RecordResponses(
2053     const RealVector& from,
2054     Design& into
2055     ) const
2056 {
2057     EDDY_FUNC_DEBUGSCOPE
2058 
2059     // get the target for information.
2060     const DesignTarget& target = this->GetDesignTarget();
2061 
2062     // get the information about the constraints.
2063     const ConstraintInfoVector& cnis = target.GetConstraintInfos();
2064 
2065     // prepare to store the location in the responses vector.
2066     RealVector::ordinalType loc = 0;
2067 
2068 //PDH: I think this is going from Dakota responses to JEGA responses,
2069 //     e.g., after a function evaluation.
2070 
2071     // find out how many objective and constraint functions there are.
2072     const size_t nof = target.GetNOF();
2073     const size_t ncn = target.GetNCN();
2074 
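    // Layout note: the incoming "from" vector holds the objective values
    // first, followed by the nonlinear constraint values; linear constraint
    // values are not present here and are computed later in
    // Evaluator::Evaluate() via the constraint info objects.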
2075     // record the objective functions first.
2076     for(size_t i=0; i<nof; ++i, ++loc)
2077         into.SetObjective(i, from[loc]);
2078 
2079     // now record the nonlinear constraints.  To do this,
2080     // we will need to know how many there are.  They will be the
2081     // first of the constraints in the design.
2082     const size_t num_nonlin_cn = this->GetNumberNonLinearConstraints();
2083     for(size_t cn=0; cn<num_nonlin_cn && cn<ncn; ++cn, ++loc)
2084     {
2085         into.SetConstraint(cn, from[loc]);
2086         cnis[cn]->RecordViolation(into);
2087     }
2088 }
2089 
2090 bool
2091 JEGAOptimizer::Evaluator::Evaluate(
2092     DesignGroup& group
2093     )
2094 {
2095     EDDY_FUNC_DEBUGSCOPE
2096 
2097     JEGALOG_II(this->GetLogger(), ldebug(), this,
2098         text_entry(ldebug(), this->GetName() + ": Performing group evaluation.")
2099         )
2100 
2101     // check for trivial abort conditions
2102     if(group.IsEmpty()) return true;
2103 
2104     // first, let's see if we can avoid any evaluations.
2105     ResolveClones(group);
2106 
2107     // we'll prepare containers for repeated use without re-construction
2108     RealVector       contVars;
2109     IntVector        discIntVars;
2110     RealVector       discRealVars;
2111     StringMultiArray discStringVars;
2112 
2113     // prepare to iterate over the group
2114     DesignDVSortSet::const_iterator it(group.BeginDV());
2115     const DesignDVSortSet::const_iterator e(group.EndDV());
2116 
2117     // these quantities will be used below
2118     const DesignTarget& target = this->GetDesignTarget();
2119 
2120     // Find out the counts on the different types of constraints
2121     const size_t num_nonlin_cn = this->GetNumberNonLinearConstraints();
2122     // const size_t num_lin_cn = this->GetNumberLinearConstraints();
2123 
2124     // Get the information about the constraints.
2125     const ConstraintInfoVector& cninfos = target.GetConstraintInfos();
2126 
2127     // Prepare to store the number of requests in order to avoid overshooting
2128     // the limit.
2129     const eddy::utilities::uint64_t priorReqs = this->GetNumberEvaluations();
2130     eddy::utilities::uint64_t numEvalReqs = 0;
2131 
2132     // store an iterator to the first linear constraint so that
2133     // we can evaluate the linear constraints using the cninfo objects.
2134 
2135     // prepare to iterate
2136     ConstraintInfoVector::const_iterator flincn(
2137         cninfos.begin() + num_nonlin_cn
2138         );
2139     ConstraintInfoVector::const_iterator cit;
2140 
2141     // prepare to return the success of this.  Success occurs only if all
2142     // designs wind up evaluated and non-illconditioned.
2143     bool ret = true;
2144 
2145     for(; it!=e; ++it)
2146     {
2147         // If this Design is evaluated, let's skip it.
2148         if((*it)->IsEvaluated()) continue;
2149 
2150         // If we've reached our maximum allowable number of
2151         // evaluations, tag remaining as evaluated and illconditioned.
2152         // By doing so, they will be flushed from the algorithm.
2153         if((priorReqs + numEvalReqs) >= this->GetMaxEvaluations())
2154         {
2155             (*it)->SetEvaluated(true);
2156             (*it)->SetIllconditioned(true);
2157             ret = false;
2158             continue;
2159         }
2160 
2161         // extract the real and continuous variables
2162         // from the current Design
2163         this->SeparateVariables(**it, contVars, discIntVars, discRealVars,
2164 	    discStringVars);
2165 
2166         // send this guy out for evaluation using the _model.
2167 
2168         // first, set the current values of the variables in the model
2169         this->_model.continuous_variables(contVars);
2170         this->_model.discrete_int_variables(discIntVars);
2171         this->_model.discrete_real_variables(discRealVars);
2172 	// Strings set by calling single value setter for each
2173 	for (size_t i=0; i<discStringVars.num_elements(); ++i)
2174 	  this->_model.discrete_string_variable(discStringVars[i],i);
2175 	// Could use discrete_string_variables to avoid overhead of repeated
2176 	// function calls, but it takes a StringMultiArrayConstView, which
2177 	// must be created from discStringVars. Maybe there's a simpler way,
2178 	// but...
2179 	// const size_t &dsv_len = discStringVars.num_elements();
2180 	// StringMultiArrayConstView dsv_view = discStringVars[
2181 	//   boost::indices[idx_range(0,dsv_len)]];
2182         // this->_model.discrete_string_variables(dsv_view);
2183 
2184         // now request the evaluation in synchronous or asynchronous mode.
2185         if(this->_model.asynch_flag())
2186         {
2187             // The following method call will use the default
2188             // Active set vector which is to just compute
2189             // function values, no gradients or hessians.
2190             this->_model.evaluate_nowait();
2191         }
2192         else
2193         {
2194             // The following method call will use the default
2195             // Active set vector which is to just compute
2196             // function values, no gradients or hessians.
2197             this->_model.evaluate();
2198 
2199             // increment the number of performed evaluations by 1
2200             this->IncrementNumberEvaluations();
2201 
2202             // Record the responses back into the Design
2203             const RealVector& ftn_vals =
2204                 this->_model.current_response().function_values();
2205 
2206             this->RecordResponses(ftn_vals, **it);
2207 
2208             // Label this guy as now being evaluated.
2209             (*it)->SetEvaluated(true);
2210 
2211             // now check the feasibility of this design
2212             target.CheckFeasibility(**it);
2213         }
2214 
2215         // no matter what, we want to increment the number of evaluations
2216         // or evaluation requests.
2217         ++numEvalReqs;
2218 
2219         // The responses do not (or will not) include the linear
2220         // constraint values. We have to compute them ourselves.
2221         // We can do it using the "EvaluateConstraint" method, which
2222         // will only succeed for linear constraints for which
2223         // coefficients have been supplied.
2224         for(cit=flincn; cit!=cninfos.end(); ++cit)
2225         {
2226             (*cit)->EvaluateConstraint(**it);
2227             (*cit)->RecordViolation(**it);
2228         }
2229     }
2230 
2231     // If we did our evaluations asynchronously, we did not yet record
2232     // the results (because they were not available).  We need to do so
2233     // now.  The call to _model.synchronize causes the program to block
2234     // until all the results are available.  We can then record the
2235     // responses in the same fashion as above.  Note that the linear
2236     // constraints have already been computed!!
2237     if(this->_model.asynch_flag())
2238     {
2239         // Wait for the responses.
2240         const IntResponseMap& response_map = this->_model.synchronize();
2241         size_t num_resp = response_map.size();
2242 
2243         // increment the number of evaluations by the number of responses
2244         this->IncrementNumberEvaluations(num_resp);
2245 
2246         EDDY_ASSERT(num_resp == numEvalReqs);
2247 
2248         // prepare to access the elements of the response_map by iterator.
2249         IntRespMCIter r_cit = response_map.begin();
2250 
2251         // Record the set of responses in the DesignGroup
2252         for(it=group.BeginDV(); it!=e; ++it)
2253         {
2254             // we didn't send already-evaluated Designs out for evaluation
2255             // so skip them here as well.
2256             if((*it)->IsEvaluated()) continue;
2257 
2258             // Put the responses into the Design properly.
2259             this->RecordResponses(r_cit->second.function_values(), **it);
2260 
2261             // Label this guy as now being evaluated.
2262             (*it)->SetEvaluated(true);
2263 
2264             // now check the feasibility of this design
2265             target.CheckFeasibility(**it);
2266 
2267 	    // only advance the response iterator for newly evaluated points in response_map
2268 	    ++r_cit;
2269         }
2270     }
2271 
2272     return ret;
2273 }
2274 
2275 /*
2276 ===============================================================================
2277 End Namespace
2278 ===============================================================================
2279 */
2280 } // namespace Dakota
2281