/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#pragma once
#include <aws/sagemaker/SageMaker_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/sagemaker/model/HyperParameterTuningJobWarmStartType.h>
#include <aws/sagemaker/model/ParentHyperParameterTuningJob.h>
#include <utility>

namespace Aws
{
namespace Utils
{
namespace Json
{
  class JsonValue;
  class JsonView;
} // namespace Json
} // namespace Utils
namespace SageMaker
{
namespace Model
{

  /**
   * <p>Specifies the configuration for a hyperparameter tuning job that uses one or
   * more previous hyperparameter tuning jobs as a starting point. The results of
   * previous tuning jobs are used to inform which combinations of hyperparameters to
   * search over in the new tuning job.</p> <p>All training jobs launched by the new
   * hyperparameter tuning job are evaluated by using the objective metric, and the
   * training job that performs the best is compared to the best training jobs from
   * the parent tuning jobs. From these, the training job that performs the best as
   * measured by the objective metric is returned as the overall best training
   * job.</p>  <p>All training jobs launched by parent hyperparameter tuning
   * jobs and the new hyperparameter tuning jobs count against the limit of training
   * jobs for the tuning job.</p> <p><h3>See Also:</h3>   <a
   * href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/HyperParameterTuningJobWarmStartConfig">AWS
   * API Reference</a></p>
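   * <p>As an illustrative sketch only (not part of the generated API
   * documentation), a warm start configuration is typically built with the fluent
   * setters below and then attached to the request that creates the new tuning
   * job. The <code>WithHyperParameterTuningJobName</code> setter, the
   * <code>CreateHyperParameterTuningJobRequest::SetWarmStartConfig</code> setter,
   * and the enum value spelling <code>IdenticalDataAndAlgorithm</code> are assumed
   * from the usual SDK naming conventions.</p>
   * @code
   * // Hypothetical usage sketch; "previous-tuning-job" is a placeholder job name.
   * Aws::SageMaker::Model::HyperParameterTuningJobWarmStartConfig warmStartConfig;
   * warmStartConfig.AddParentHyperParameterTuningJobs(
   *     Aws::SageMaker::Model::ParentHyperParameterTuningJob()
   *         .WithHyperParameterTuningJobName("previous-tuning-job"));
   * warmStartConfig.SetWarmStartType(
   *     Aws::SageMaker::Model::HyperParameterTuningJobWarmStartType::IdenticalDataAndAlgorithm);
   *
   * Aws::SageMaker::Model::CreateHyperParameterTuningJobRequest request;
   * request.SetWarmStartConfig(warmStartConfig); // assumed setter on the request
   * @endcode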
   */
  class AWS_SAGEMAKER_API HyperParameterTuningJobWarmStartConfig
  {
  public:
    HyperParameterTuningJobWarmStartConfig();
    HyperParameterTuningJobWarmStartConfig(Aws::Utils::Json::JsonView jsonValue);
    HyperParameterTuningJobWarmStartConfig& operator=(Aws::Utils::Json::JsonView jsonValue);
    Aws::Utils::Json::JsonValue Jsonize() const;


    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
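     * <p>A minimal usage sketch (not part of the generated API documentation): the
     * job name is a placeholder, and <code>WithHyperParameterTuningJobName</code> is
     * assumed to be the usual fluent setter on
     * <code>ParentHyperParameterTuningJob</code>.</p>
     * @code
     * Aws::SageMaker::Model::HyperParameterTuningJobWarmStartConfig warmStartConfig;
     * Aws::Vector<Aws::SageMaker::Model::ParentHyperParameterTuningJob> parents;
     * parents.push_back(Aws::SageMaker::Model::ParentHyperParameterTuningJob()
     *     .WithHyperParameterTuningJobName("previous-tuning-job"));
     * warmStartConfig.SetParentHyperParameterTuningJobs(parents);
     * @endcode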
     */
    inline const Aws::Vector<ParentHyperParameterTuningJob>& GetParentHyperParameterTuningJobs() const{ return m_parentHyperParameterTuningJobs; }

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline bool ParentHyperParameterTuningJobsHasBeenSet() const { return m_parentHyperParameterTuningJobsHasBeenSet; }

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline void SetParentHyperParameterTuningJobs(const Aws::Vector<ParentHyperParameterTuningJob>& value) { m_parentHyperParameterTuningJobsHasBeenSet = true; m_parentHyperParameterTuningJobs = value; }

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline void SetParentHyperParameterTuningJobs(Aws::Vector<ParentHyperParameterTuningJob>&& value) { m_parentHyperParameterTuningJobsHasBeenSet = true; m_parentHyperParameterTuningJobs = std::move(value); }

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline HyperParameterTuningJobWarmStartConfig& WithParentHyperParameterTuningJobs(const Aws::Vector<ParentHyperParameterTuningJob>& value) { SetParentHyperParameterTuningJobs(value); return *this;}

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline HyperParameterTuningJobWarmStartConfig& WithParentHyperParameterTuningJobs(Aws::Vector<ParentHyperParameterTuningJob>&& value) { SetParentHyperParameterTuningJobs(std::move(value)); return *this;}

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline HyperParameterTuningJobWarmStartConfig& AddParentHyperParameterTuningJobs(const ParentHyperParameterTuningJob& value) { m_parentHyperParameterTuningJobsHasBeenSet = true; m_parentHyperParameterTuningJobs.push_back(value); return *this; }

    /**
     * <p>An array of hyperparameter tuning jobs that are used as the starting point
     * for the new hyperparameter tuning job. For more information about warm starting
     * a hyperparameter tuning job, see <a
     * href="https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-warm-start.html">Using
     * a Previous Hyperparameter Tuning Job as a Starting Point</a>.</p>
     * <p>Hyperparameter tuning jobs created before October 1, 2018 cannot be used as
     * parent jobs for warm start tuning jobs.</p>
     */
    inline HyperParameterTuningJobWarmStartConfig& AddParentHyperParameterTuningJobs(ParentHyperParameterTuningJob&& value) { m_parentHyperParameterTuningJobsHasBeenSet = true; m_parentHyperParameterTuningJobs.push_back(std::move(value)); return *this; }


    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
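     * <p>A brief sketch of choosing between the two modes (not part of the
     * generated API documentation; the enum value spellings
     * <code>IdenticalDataAndAlgorithm</code> and <code>TransferLearning</code> are
     * assumed to mirror the API string values):</p>
     * @code
     * Aws::SageMaker::Model::HyperParameterTuningJobWarmStartConfig warmStartConfig;
     * // Same input data and training image as the parents:
     * warmStartConfig.SetWarmStartType(
     *     Aws::SageMaker::Model::HyperParameterTuningJobWarmStartType::IdenticalDataAndAlgorithm);
     * // Or, when input data or the training image version may differ:
     * warmStartConfig.WithWarmStartType(
     *     Aws::SageMaker::Model::HyperParameterTuningJobWarmStartType::TransferLearning);
     * @endcode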
     */
    inline const HyperParameterTuningJobWarmStartType& GetWarmStartType() const{ return m_warmStartType; }

    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
     */
    inline bool WarmStartTypeHasBeenSet() const { return m_warmStartTypeHasBeenSet; }

    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
     */
    inline void SetWarmStartType(const HyperParameterTuningJobWarmStartType& value) { m_warmStartTypeHasBeenSet = true; m_warmStartType = value; }

    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
     */
    inline void SetWarmStartType(HyperParameterTuningJobWarmStartType&& value) { m_warmStartTypeHasBeenSet = true; m_warmStartType = std::move(value); }

    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
     */
    inline HyperParameterTuningJobWarmStartConfig& WithWarmStartType(const HyperParameterTuningJobWarmStartType& value) { SetWarmStartType(value); return *this;}

    /**
     * <p>Specifies one of the following:</p> <dl>
     * <dt>IDENTICAL_DATA_AND_ALGORITHM</dt> <dd> <p>The new hyperparameter tuning job
     * uses the same input data and training image as the parent tuning jobs. You can
     * change the hyperparameter ranges to search and the maximum number of training
     * jobs that the hyperparameter tuning job launches. You cannot use a new version
     * of the training algorithm, unless the changes in the new version do not affect
     * the algorithm itself. For example, changes that improve logging or add
     * support for a different data format are allowed. You can also change
     * hyperparameters from tunable to static, and from static to tunable, but the
     * total number of static plus tunable hyperparameters must remain the same as it
     * is in all parent jobs. The objective metric for the new tuning job must be the
     * same as for all parent jobs.</p> </dd> <dt>TRANSFER_LEARNING</dt> <dd> <p>The
     * new hyperparameter tuning job can include input data, hyperparameter ranges,
     * maximum number of concurrent training jobs, and maximum number of training jobs
     * that are different than those of its parent hyperparameter tuning jobs. The
     * training image can also be a different version from the version used in the
     * parent hyperparameter tuning job. You can also change hyperparameters from
     * tunable to static, and from static to tunable, but the total number of static
     * plus tunable hyperparameters must remain the same as it is in all parent jobs.
     * The objective metric for the new tuning job must be the same as for all parent
     * jobs.</p> </dd> </dl>
     */
    inline HyperParameterTuningJobWarmStartConfig& WithWarmStartType(HyperParameterTuningJobWarmStartType&& value) { SetWarmStartType(std::move(value)); return *this;}

  private:

    Aws::Vector<ParentHyperParameterTuningJob> m_parentHyperParameterTuningJobs;
    bool m_parentHyperParameterTuningJobsHasBeenSet;

    HyperParameterTuningJobWarmStartType m_warmStartType;
    bool m_warmStartTypeHasBeenSet;
  };

} // namespace Model
} // namespace SageMaker
} // namespace Aws