// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.

package mediaconvert

import (
	"fmt"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awsutil"
	"github.com/aws/aws-sdk-go/aws/request"
	"github.com/aws/aws-sdk-go/private/protocol"
	"github.com/aws/aws-sdk-go/private/protocol/restjson"
)

const opAssociateCertificate = "AssociateCertificate"

// AssociateCertificateRequest generates a "aws/request.Request" representing the
// client's request for the AssociateCertificate operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See AssociateCertificate for more information on using the AssociateCertificate
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the AssociateCertificateRequest method.
//    req, resp := client.AssociateCertificateRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/AssociateCertificate
func (c *MediaConvert) AssociateCertificateRequest(input *AssociateCertificateInput) (req *request.Request, output *AssociateCertificateOutput) {
	op := &request.Operation{
		Name:       opAssociateCertificate,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/certificates",
	}

	if input == nil {
		input = &AssociateCertificateInput{}
	}

	output = &AssociateCertificateOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// AssociateCertificate API operation for AWS Elemental MediaConvert.
//
// Associates an AWS Certificate Manager (ACM) Amazon Resource Name (ARN) with
// AWS Elemental MediaConvert.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation AssociateCertificate for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/AssociateCertificate
func (c *MediaConvert) AssociateCertificate(input *AssociateCertificateInput) (*AssociateCertificateOutput, error) {
	req, out := c.AssociateCertificateRequest(input)
	return out, req.Send()
}

// AssociateCertificateWithContext is the same as AssociateCertificate with the addition of
// the ability to pass a context and additional request options.
//
// See AssociateCertificate for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) AssociateCertificateWithContext(ctx aws.Context, input *AssociateCertificateInput, opts ...request.Option) (*AssociateCertificateOutput, error) {
	req, out := c.AssociateCertificateRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

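
// Illustrative usage (a sketch, not generated SDK documentation): associating
// an ACM certificate by ARN. "svc" is assumed to be an existing
// *mediaconvert.MediaConvert client, and the ARN value is a placeholder.
//
//    _, err := svc.AssociateCertificate(&mediaconvert.AssociateCertificateInput{
//        Arn: aws.String("arn:aws:acm:us-east-1:123456789012:certificate/example"),
//    })
//    if err != nil {
//        // inspect err, for example with an awserr.Error type assertion
//    }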
const opCancelJob = "CancelJob"

// CancelJobRequest generates a "aws/request.Request" representing the
// client's request for the CancelJob operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See CancelJob for more information on using the CancelJob
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the CancelJobRequest method.
//    req, resp := client.CancelJobRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CancelJob
func (c *MediaConvert) CancelJobRequest(input *CancelJobInput) (req *request.Request, output *CancelJobOutput) {
	op := &request.Operation{
		Name:       opCancelJob,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2017-08-29/jobs/{id}",
	}

	if input == nil {
		input = &CancelJobInput{}
	}

	output = &CancelJobOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// CancelJob API operation for AWS Elemental MediaConvert.
//
// Permanently cancel a job. Once you have canceled a job, you can't start it
// again.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation CancelJob for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CancelJob
func (c *MediaConvert) CancelJob(input *CancelJobInput) (*CancelJobOutput, error) {
	req, out := c.CancelJobRequest(input)
	return out, req.Send()
}

// CancelJobWithContext is the same as CancelJob with the addition of
// the ability to pass a context and additional request options.
//
// See CancelJob for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) CancelJobWithContext(ctx aws.Context, input *CancelJobInput, opts ...request.Option) (*CancelJobOutput, error) {
	req, out := c.CancelJobRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

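
// Illustrative usage (a sketch, not generated SDK documentation): cancelling a
// job by ID with a deadline. "svc" and the job ID below are assumptions.
//
//    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
//    defer cancel()
//    _, err := svc.CancelJobWithContext(ctx, &mediaconvert.CancelJobInput{
//        Id: aws.String("1234567890123-abcdef"),
//    })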
const opCreateJob = "CreateJob"

// CreateJobRequest generates a "aws/request.Request" representing the
// client's request for the CreateJob operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See CreateJob for more information on using the CreateJob
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the CreateJobRequest method.
//    req, resp := client.CreateJobRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateJob
func (c *MediaConvert) CreateJobRequest(input *CreateJobInput) (req *request.Request, output *CreateJobOutput) {
	op := &request.Operation{
		Name:       opCreateJob,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/jobs",
	}

	if input == nil {
		input = &CreateJobInput{}
	}

	output = &CreateJobOutput{}
	req = c.newRequest(op, input, output)
	return
}

// CreateJob API operation for AWS Elemental MediaConvert.
//
// Create a new transcoding job. For information about jobs and job settings,
// see the User Guide at http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation CreateJob for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateJob
func (c *MediaConvert) CreateJob(input *CreateJobInput) (*CreateJobOutput, error) {
	req, out := c.CreateJobRequest(input)
	return out, req.Send()
}

// CreateJobWithContext is the same as CreateJob with the addition of
// the ability to pass a context and additional request options.
//
// See CreateJob for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) CreateJobWithContext(ctx aws.Context, input *CreateJobInput, opts ...request.Option) (*CreateJobOutput, error) {
	req, out := c.CreateJobRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

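
// Illustrative usage (a sketch, not generated SDK documentation): submitting a
// minimal job. The role ARN and queue ARN are placeholders, and the job settings
// (inputs and output groups) are elided because they depend on your workflow.
//
//    out, err := svc.CreateJob(&mediaconvert.CreateJobInput{
//        Role:  aws.String("arn:aws:iam::123456789012:role/MediaConvertRole"),
//        Queue: aws.String("arn:aws:mediaconvert:us-east-1:123456789012:queues/Default"),
//        Settings: &mediaconvert.JobSettings{
//            // Inputs and OutputGroups go here; see the User Guide for details.
//        },
//    })
//    if err == nil {
//        fmt.Println(*out.Job.Id)
//    }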
const opCreateJobTemplate = "CreateJobTemplate"

// CreateJobTemplateRequest generates a "aws/request.Request" representing the
// client's request for the CreateJobTemplate operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See CreateJobTemplate for more information on using the CreateJobTemplate
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the CreateJobTemplateRequest method.
//    req, resp := client.CreateJobTemplateRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateJobTemplate
func (c *MediaConvert) CreateJobTemplateRequest(input *CreateJobTemplateInput) (req *request.Request, output *CreateJobTemplateOutput) {
	op := &request.Operation{
		Name:       opCreateJobTemplate,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/jobTemplates",
	}

	if input == nil {
		input = &CreateJobTemplateInput{}
	}

	output = &CreateJobTemplateOutput{}
	req = c.newRequest(op, input, output)
	return
}

// CreateJobTemplate API operation for AWS Elemental MediaConvert.
//
// Create a new job template. For information about job templates, see the User
// Guide at http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation CreateJobTemplate for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateJobTemplate
func (c *MediaConvert) CreateJobTemplate(input *CreateJobTemplateInput) (*CreateJobTemplateOutput, error) {
	req, out := c.CreateJobTemplateRequest(input)
	return out, req.Send()
}

// CreateJobTemplateWithContext is the same as CreateJobTemplate with the addition of
// the ability to pass a context and additional request options.
//
// See CreateJobTemplate for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) CreateJobTemplateWithContext(ctx aws.Context, input *CreateJobTemplateInput, opts ...request.Option) (*CreateJobTemplateOutput, error) {
	req, out := c.CreateJobTemplateRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

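
// Illustrative usage (a sketch, not generated SDK documentation): creating a
// job template. The name is a placeholder and the settings are elided.
//
//    _, err := svc.CreateJobTemplate(&mediaconvert.CreateJobTemplateInput{
//        Name: aws.String("example-template"),
//        Settings: &mediaconvert.JobTemplateSettings{
//            // Inputs and OutputGroups go here, as in a job's settings.
//        },
//    })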
const opCreatePreset = "CreatePreset"

// CreatePresetRequest generates a "aws/request.Request" representing the
// client's request for the CreatePreset operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See CreatePreset for more information on using the CreatePreset
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the CreatePresetRequest method.
//    req, resp := client.CreatePresetRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreatePreset
func (c *MediaConvert) CreatePresetRequest(input *CreatePresetInput) (req *request.Request, output *CreatePresetOutput) {
	op := &request.Operation{
		Name:       opCreatePreset,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/presets",
	}

	if input == nil {
		input = &CreatePresetInput{}
	}

	output = &CreatePresetOutput{}
	req = c.newRequest(op, input, output)
	return
}

// CreatePreset API operation for AWS Elemental MediaConvert.
//
// Create a new preset. For information about presets, see the User Guide
// at http://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation CreatePreset for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreatePreset
func (c *MediaConvert) CreatePreset(input *CreatePresetInput) (*CreatePresetOutput, error) {
	req, out := c.CreatePresetRequest(input)
	return out, req.Send()
}

// CreatePresetWithContext is the same as CreatePreset with the addition of
// the ability to pass a context and additional request options.
//
// See CreatePreset for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) CreatePresetWithContext(ctx aws.Context, input *CreatePresetInput, opts ...request.Option) (*CreatePresetOutput, error) {
	req, out := c.CreatePresetRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

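
// Illustrative usage (a sketch, not generated SDK documentation): creating an
// output preset. The name is a placeholder and the settings are elided.
//
//    _, err := svc.CreatePreset(&mediaconvert.CreatePresetInput{
//        Name: aws.String("example-preset"),
//        Settings: &mediaconvert.PresetSettings{
//            // Audio, video, and container settings go here.
//        },
//    })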
const opCreateQueue = "CreateQueue"

// CreateQueueRequest generates a "aws/request.Request" representing the
// client's request for the CreateQueue operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See CreateQueue for more information on using the CreateQueue
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the CreateQueueRequest method.
//    req, resp := client.CreateQueueRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateQueue
func (c *MediaConvert) CreateQueueRequest(input *CreateQueueInput) (req *request.Request, output *CreateQueueOutput) {
	op := &request.Operation{
		Name:       opCreateQueue,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/queues",
	}

	if input == nil {
		input = &CreateQueueInput{}
	}

	output = &CreateQueueOutput{}
	req = c.newRequest(op, input, output)
	return
}

// CreateQueue API operation for AWS Elemental MediaConvert.
//
// Create a new transcoding queue. For information about queues, see Working
// With Queues in the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation CreateQueue for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/CreateQueue
func (c *MediaConvert) CreateQueue(input *CreateQueueInput) (*CreateQueueOutput, error) {
	req, out := c.CreateQueueRequest(input)
	return out, req.Send()
}

// CreateQueueWithContext is the same as CreateQueue with the addition of
// the ability to pass a context and additional request options.
//
// See CreateQueue for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) CreateQueueWithContext(ctx aws.Context, input *CreateQueueInput, opts ...request.Option) (*CreateQueueOutput, error) {
	req, out := c.CreateQueueRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

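
// Illustrative usage (a sketch, not generated SDK documentation): creating an
// on-demand queue. The name and description values are placeholders.
//
//    out, err := svc.CreateQueue(&mediaconvert.CreateQueueInput{
//        Name:        aws.String("example-queue"),
//        Description: aws.String("Queue for overnight transcodes"),
//    })
//    if err == nil {
//        fmt.Println(*out.Queue.Arn)
//    }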
const opDeleteJobTemplate = "DeleteJobTemplate"

// DeleteJobTemplateRequest generates a "aws/request.Request" representing the
// client's request for the DeleteJobTemplate operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See DeleteJobTemplate for more information on using the DeleteJobTemplate
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the DeleteJobTemplateRequest method.
//    req, resp := client.DeleteJobTemplateRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeleteJobTemplate
func (c *MediaConvert) DeleteJobTemplateRequest(input *DeleteJobTemplateInput) (req *request.Request, output *DeleteJobTemplateOutput) {
	op := &request.Operation{
		Name:       opDeleteJobTemplate,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2017-08-29/jobTemplates/{name}",
	}

	if input == nil {
		input = &DeleteJobTemplateInput{}
	}

	output = &DeleteJobTemplateOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// DeleteJobTemplate API operation for AWS Elemental MediaConvert.
//
// Permanently delete a job template you have created.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation DeleteJobTemplate for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeleteJobTemplate
func (c *MediaConvert) DeleteJobTemplate(input *DeleteJobTemplateInput) (*DeleteJobTemplateOutput, error) {
	req, out := c.DeleteJobTemplateRequest(input)
	return out, req.Send()
}

// DeleteJobTemplateWithContext is the same as DeleteJobTemplate with the addition of
// the ability to pass a context and additional request options.
//
// See DeleteJobTemplate for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DeleteJobTemplateWithContext(ctx aws.Context, input *DeleteJobTemplateInput, opts ...request.Option) (*DeleteJobTemplateOutput, error) {
	req, out := c.DeleteJobTemplateRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opDeletePreset = "DeletePreset"

// DeletePresetRequest generates a "aws/request.Request" representing the
// client's request for the DeletePreset operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See DeletePreset for more information on using the DeletePreset
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the DeletePresetRequest method.
//    req, resp := client.DeletePresetRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeletePreset
func (c *MediaConvert) DeletePresetRequest(input *DeletePresetInput) (req *request.Request, output *DeletePresetOutput) {
	op := &request.Operation{
		Name:       opDeletePreset,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2017-08-29/presets/{name}",
	}

	if input == nil {
		input = &DeletePresetInput{}
	}

	output = &DeletePresetOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// DeletePreset API operation for AWS Elemental MediaConvert.
//
// Permanently delete a preset you have created.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation DeletePreset for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeletePreset
func (c *MediaConvert) DeletePreset(input *DeletePresetInput) (*DeletePresetOutput, error) {
	req, out := c.DeletePresetRequest(input)
	return out, req.Send()
}

// DeletePresetWithContext is the same as DeletePreset with the addition of
// the ability to pass a context and additional request options.
//
// See DeletePreset for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DeletePresetWithContext(ctx aws.Context, input *DeletePresetInput, opts ...request.Option) (*DeletePresetOutput, error) {
	req, out := c.DeletePresetRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opDeleteQueue = "DeleteQueue"

// DeleteQueueRequest generates a "aws/request.Request" representing the
// client's request for the DeleteQueue operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See DeleteQueue for more information on using the DeleteQueue
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the DeleteQueueRequest method.
//    req, resp := client.DeleteQueueRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeleteQueue
func (c *MediaConvert) DeleteQueueRequest(input *DeleteQueueInput) (req *request.Request, output *DeleteQueueOutput) {
	op := &request.Operation{
		Name:       opDeleteQueue,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2017-08-29/queues/{name}",
	}

	if input == nil {
		input = &DeleteQueueInput{}
	}

	output = &DeleteQueueOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// DeleteQueue API operation for AWS Elemental MediaConvert.
//
// Permanently delete a queue you have created.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation DeleteQueue for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DeleteQueue
func (c *MediaConvert) DeleteQueue(input *DeleteQueueInput) (*DeleteQueueOutput, error) {
	req, out := c.DeleteQueueRequest(input)
	return out, req.Send()
}

// DeleteQueueWithContext is the same as DeleteQueue with the addition of
// the ability to pass a context and additional request options.
//
// See DeleteQueue for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DeleteQueueWithContext(ctx aws.Context, input *DeleteQueueInput, opts ...request.Option) (*DeleteQueueOutput, error) {
	req, out := c.DeleteQueueRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

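
// Illustrative usage (a sketch, not generated SDK documentation): deleting a
// queue by name; DeletePreset and DeleteJobTemplate follow the same pattern.
// The queue name is a placeholder.
//
//    _, err := svc.DeleteQueue(&mediaconvert.DeleteQueueInput{
//        Name: aws.String("example-queue"),
//    })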
const opDescribeEndpoints = "DescribeEndpoints"

// DescribeEndpointsRequest generates a "aws/request.Request" representing the
// client's request for the DescribeEndpoints operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See DescribeEndpoints for more information on using the DescribeEndpoints
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the DescribeEndpointsRequest method.
//    req, resp := client.DescribeEndpointsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DescribeEndpoints
func (c *MediaConvert) DescribeEndpointsRequest(input *DescribeEndpointsInput) (req *request.Request, output *DescribeEndpointsOutput) {
	op := &request.Operation{
		Name:       opDescribeEndpoints,
		HTTPMethod: "POST",
		HTTPPath:   "/2017-08-29/endpoints",
		Paginator: &request.Paginator{
			InputTokens:     []string{"NextToken"},
			OutputTokens:    []string{"NextToken"},
			LimitToken:      "MaxResults",
			TruncationToken: "",
		},
	}

	if input == nil {
		input = &DescribeEndpointsInput{}
	}

	output = &DescribeEndpointsOutput{}
	req = c.newRequest(op, input, output)
	return
}

// DescribeEndpoints API operation for AWS Elemental MediaConvert.
//
// Send a request with an empty body to the regional API endpoint to get your
// account API endpoint.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation DescribeEndpoints for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DescribeEndpoints
func (c *MediaConvert) DescribeEndpoints(input *DescribeEndpointsInput) (*DescribeEndpointsOutput, error) {
	req, out := c.DescribeEndpointsRequest(input)
	return out, req.Send()
}

// DescribeEndpointsWithContext is the same as DescribeEndpoints with the addition of
// the ability to pass a context and additional request options.
//
// See DescribeEndpoints for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DescribeEndpointsWithContext(ctx aws.Context, input *DescribeEndpointsInput, opts ...request.Option) (*DescribeEndpointsOutput, error) {
	req, out := c.DescribeEndpointsRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

// DescribeEndpointsPages iterates over the pages of a DescribeEndpoints operation,
// calling the "fn" function with the response data for each page. To stop
// iterating, return false from the fn function.
//
// See DescribeEndpoints method for more information on how to use this operation.
//
// Note: This operation can generate multiple requests to a service.
//
//    // Example iterating over at most 3 pages of a DescribeEndpoints operation.
//    pageNum := 0
//    err := client.DescribeEndpointsPages(params,
//        func(page *mediaconvert.DescribeEndpointsOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum <= 3
//        })
//
func (c *MediaConvert) DescribeEndpointsPages(input *DescribeEndpointsInput, fn func(*DescribeEndpointsOutput, bool) bool) error {
	return c.DescribeEndpointsPagesWithContext(aws.BackgroundContext(), input, fn)
}

// DescribeEndpointsPagesWithContext is the same as DescribeEndpointsPages except
// it takes a Context and allows setting request options on the pages.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DescribeEndpointsPagesWithContext(ctx aws.Context, input *DescribeEndpointsInput, fn func(*DescribeEndpointsOutput, bool) bool, opts ...request.Option) error {
	p := request.Pagination{
		NewRequest: func() (*request.Request, error) {
			var inCpy *DescribeEndpointsInput
			if input != nil {
				tmp := *input
				inCpy = &tmp
			}
			req, _ := c.DescribeEndpointsRequest(inCpy)
			req.SetContext(ctx)
			req.ApplyOptions(opts...)
			return req, nil
		},
	}

	for p.Next() {
		if !fn(p.Page().(*DescribeEndpointsOutput), !p.HasNextPage()) {
			break
		}
	}

	return p.Err()
}

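
// Illustrative usage (a sketch, not generated SDK documentation): discovering
// your account-specific endpoint and building a client that uses it. The
// session setup and variable names are assumptions for the sketch.
//
//    sess := session.Must(session.NewSession())
//    bootstrap := mediaconvert.New(sess)
//
//    eps, err := bootstrap.DescribeEndpoints(&mediaconvert.DescribeEndpointsInput{})
//    if err == nil && len(eps.Endpoints) > 0 {
//        svc := mediaconvert.New(sess, aws.NewConfig().WithEndpoint(*eps.Endpoints[0].Url))
//        _ = svc // use svc for all subsequent MediaConvert calls
//    }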
const opDisassociateCertificate = "DisassociateCertificate"

// DisassociateCertificateRequest generates a "aws/request.Request" representing the
// client's request for the DisassociateCertificate operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See DisassociateCertificate for more information on using the DisassociateCertificate
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the DisassociateCertificateRequest method.
//    req, resp := client.DisassociateCertificateRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DisassociateCertificate
func (c *MediaConvert) DisassociateCertificateRequest(input *DisassociateCertificateInput) (req *request.Request, output *DisassociateCertificateOutput) {
	op := &request.Operation{
		Name:       opDisassociateCertificate,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2017-08-29/certificates/{arn}",
	}

	if input == nil {
		input = &DisassociateCertificateInput{}
	}

	output = &DisassociateCertificateOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// DisassociateCertificate API operation for AWS Elemental MediaConvert.
//
// Removes an association between the Amazon Resource Name (ARN) of an AWS Certificate
// Manager (ACM) certificate and an AWS Elemental MediaConvert resource.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation DisassociateCertificate for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/DisassociateCertificate
func (c *MediaConvert) DisassociateCertificate(input *DisassociateCertificateInput) (*DisassociateCertificateOutput, error) {
	req, out := c.DisassociateCertificateRequest(input)
	return out, req.Send()
}

// DisassociateCertificateWithContext is the same as DisassociateCertificate with the addition of
// the ability to pass a context and additional request options.
//
// See DisassociateCertificate for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) DisassociateCertificateWithContext(ctx aws.Context, input *DisassociateCertificateInput, opts ...request.Option) (*DisassociateCertificateOutput, error) {
	req, out := c.DisassociateCertificateRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

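
// Illustrative usage (a sketch, not generated SDK documentation): removing a
// certificate association by ARN. The ARN value is a placeholder.
//
//    _, err := svc.DisassociateCertificate(&mediaconvert.DisassociateCertificateInput{
//        Arn: aws.String("arn:aws:acm:us-east-1:123456789012:certificate/example"),
//    })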
const opGetJob = "GetJob"

// GetJobRequest generates a "aws/request.Request" representing the
// client's request for the GetJob operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See GetJob for more information on using the GetJob
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the GetJobRequest method.
//    req, resp := client.GetJobRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetJob
func (c *MediaConvert) GetJobRequest(input *GetJobInput) (req *request.Request, output *GetJobOutput) {
	op := &request.Operation{
		Name:       opGetJob,
		HTTPMethod: "GET",
		HTTPPath:   "/2017-08-29/jobs/{id}",
	}

	if input == nil {
		input = &GetJobInput{}
	}

	output = &GetJobOutput{}
	req = c.newRequest(op, input, output)
	return
}

// GetJob API operation for AWS Elemental MediaConvert.
//
// Retrieve the JSON for a specific completed transcoding job.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation GetJob for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetJob
func (c *MediaConvert) GetJob(input *GetJobInput) (*GetJobOutput, error) {
	req, out := c.GetJobRequest(input)
	return out, req.Send()
}

// GetJobWithContext is the same as GetJob with the addition of
// the ability to pass a context and additional request options.
//
// See GetJob for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) GetJobWithContext(ctx aws.Context, input *GetJobInput, opts ...request.Option) (*GetJobOutput, error) {
	req, out := c.GetJobRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

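
// Illustrative usage (a sketch, not generated SDK documentation): retrieving a
// job by ID and printing its status. The job ID is a placeholder.
//
//    out, err := svc.GetJob(&mediaconvert.GetJobInput{
//        Id: aws.String("1234567890123-abcdef"),
//    })
//    if err == nil {
//        fmt.Println(*out.Job.Status)
//    }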
const opGetJobTemplate = "GetJobTemplate"

// GetJobTemplateRequest generates a "aws/request.Request" representing the
// client's request for the GetJobTemplate operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See GetJobTemplate for more information on using the GetJobTemplate
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the GetJobTemplateRequest method.
//    req, resp := client.GetJobTemplateRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetJobTemplate
func (c *MediaConvert) GetJobTemplateRequest(input *GetJobTemplateInput) (req *request.Request, output *GetJobTemplateOutput) {
	op := &request.Operation{
		Name:       opGetJobTemplate,
		HTTPMethod: "GET",
		HTTPPath:   "/2017-08-29/jobTemplates/{name}",
	}

	if input == nil {
		input = &GetJobTemplateInput{}
	}

	output = &GetJobTemplateOutput{}
	req = c.newRequest(op, input, output)
	return
}

// GetJobTemplate API operation for AWS Elemental MediaConvert.
//
// Retrieve the JSON for a specific job template.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation GetJobTemplate for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetJobTemplate
func (c *MediaConvert) GetJobTemplate(input *GetJobTemplateInput) (*GetJobTemplateOutput, error) {
	req, out := c.GetJobTemplateRequest(input)
	return out, req.Send()
}

// GetJobTemplateWithContext is the same as GetJobTemplate with the addition of
// the ability to pass a context and additional request options.
//
// See GetJobTemplate for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) GetJobTemplateWithContext(ctx aws.Context, input *GetJobTemplateInput, opts ...request.Option) (*GetJobTemplateOutput, error) {
	req, out := c.GetJobTemplateRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opGetPreset = "GetPreset"

// GetPresetRequest generates a "aws/request.Request" representing the
// client's request for the GetPreset operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See GetPreset for more information on using the GetPreset
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the GetPresetRequest method.
//    req, resp := client.GetPresetRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetPreset
func (c *MediaConvert) GetPresetRequest(input *GetPresetInput) (req *request.Request, output *GetPresetOutput) {
	op := &request.Operation{
		Name:       opGetPreset,
		HTTPMethod: "GET",
		HTTPPath:   "/2017-08-29/presets/{name}",
	}

	if input == nil {
		input = &GetPresetInput{}
	}

	output = &GetPresetOutput{}
	req = c.newRequest(op, input, output)
	return
}

// GetPreset API operation for AWS Elemental MediaConvert.
//
// Retrieve the JSON for a specific preset.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation GetPreset for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetPreset
func (c *MediaConvert) GetPreset(input *GetPresetInput) (*GetPresetOutput, error) {
	req, out := c.GetPresetRequest(input)
	return out, req.Send()
}

// GetPresetWithContext is the same as GetPreset with the addition of
// the ability to pass a context and additional request options.
//
// See GetPreset for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) GetPresetWithContext(ctx aws.Context, input *GetPresetInput, opts ...request.Option) (*GetPresetOutput, error) {
	req, out := c.GetPresetRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opGetQueue = "GetQueue"

// GetQueueRequest generates a "aws/request.Request" representing the
// client's request for the GetQueue operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use the "Send" method on the returned Request to send the API call to the service.
// The "output" return value is not valid until after Send returns without error.
//
// See GetQueue for more information on using the GetQueue
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle, such as custom headers or retry logic.
//
//
//    // Example sending a request using the GetQueueRequest method.
//    req, resp := client.GetQueueRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetQueue
func (c *MediaConvert) GetQueueRequest(input *GetQueueInput) (req *request.Request, output *GetQueueOutput) {
	op := &request.Operation{
		Name:       opGetQueue,
		HTTPMethod: "GET",
		HTTPPath:   "/2017-08-29/queues/{name}",
	}

	if input == nil {
		input = &GetQueueInput{}
	}

	output = &GetQueueOutput{}
	req = c.newRequest(op, input, output)
	return
}

// GetQueue API operation for AWS Elemental MediaConvert.
//
// Retrieve the JSON for a specific queue.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for AWS Elemental MediaConvert's
// API operation GetQueue for usage and error information.
//
// Returned Error Types:
//   * BadRequestException
//
//   * InternalServerErrorException
//
//   * ForbiddenException
//
//   * NotFoundException
//
//   * TooManyRequestsException
//
//   * ConflictException
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/GetQueue
func (c *MediaConvert) GetQueue(input *GetQueueInput) (*GetQueueOutput, error) {
	req, out := c.GetQueueRequest(input)
	return out, req.Send()
}

// GetQueueWithContext is the same as GetQueue with the addition of
// the ability to pass a context and additional request options.
//
// See GetQueue for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil, a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MediaConvert) GetQueueWithContext(ctx aws.Context, input *GetQueueInput, opts ...request.Option) (*GetQueueOutput, error) {
	req, out := c.GetQueueRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

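
// Illustrative usage (a sketch, not generated SDK documentation): retrieving a
// queue by name; GetJobTemplate and GetPreset follow the same pattern.
//
//    out, err := svc.GetQueue(&mediaconvert.GetQueueInput{
//        Name: aws.String("Default"),
//    })
//    if err == nil {
//        fmt.Println(*out.Queue.Status)
//    }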
1408const opListJobTemplates = "ListJobTemplates"
1409
1410// ListJobTemplatesRequest generates a "aws/request.Request" representing the
1411// client's request for the ListJobTemplates operation. The "output" return
1412// value will be populated with the request's response once the request completes
1413// successfully.
1414//
1415// Use "Send" method on the returned Request to send the API call to the service.
1416// the "output" return value is not valid until after Send returns without error.
1417//
1418// See ListJobTemplates for more information on using the ListJobTemplates
1419// API call, and error handling.
1420//
1421// This method is useful when you want to inject custom logic or configuration
1422// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1423//
1424//
1425//    // Example sending a request using the ListJobTemplatesRequest method.
1426//    req, resp := client.ListJobTemplatesRequest(params)
1427//
1428//    err := req.Send()
1429//    if err == nil { // resp is now filled
1430//        fmt.Println(resp)
1431//    }
1432//
1433// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListJobTemplates
1434func (c *MediaConvert) ListJobTemplatesRequest(input *ListJobTemplatesInput) (req *request.Request, output *ListJobTemplatesOutput) {
1435	op := &request.Operation{
1436		Name:       opListJobTemplates,
1437		HTTPMethod: "GET",
1438		HTTPPath:   "/2017-08-29/jobTemplates",
1439		Paginator: &request.Paginator{
1440			InputTokens:     []string{"NextToken"},
1441			OutputTokens:    []string{"NextToken"},
1442			LimitToken:      "MaxResults",
1443			TruncationToken: "",
1444		},
1445	}
1446
1447	if input == nil {
1448		input = &ListJobTemplatesInput{}
1449	}
1450
1451	output = &ListJobTemplatesOutput{}
1452	req = c.newRequest(op, input, output)
1453	return
1454}
1455
1456// ListJobTemplates API operation for AWS Elemental MediaConvert.
1457//
// Retrieve a JSON array of up to twenty of your job templates. This will return
// the templates themselves, not just a list of them. To retrieve the next twenty
// templates, use the nextToken string returned with the array.
1461//
1462// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1463// with awserr.Error's Code and Message methods to get detailed information about
1464// the error.
1465//
1466// See the AWS API reference guide for AWS Elemental MediaConvert's
1467// API operation ListJobTemplates for usage and error information.
1468//
1469// Returned Error Types:
1470//   * BadRequestException
1471//
1472//   * InternalServerErrorException
1473//
1474//   * ForbiddenException
1475//
1476//   * NotFoundException
1477//
1478//   * TooManyRequestsException
1479//
1480//   * ConflictException
1481//
1482// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListJobTemplates
1483func (c *MediaConvert) ListJobTemplates(input *ListJobTemplatesInput) (*ListJobTemplatesOutput, error) {
1484	req, out := c.ListJobTemplatesRequest(input)
1485	return out, req.Send()
1486}
1487
1488// ListJobTemplatesWithContext is the same as ListJobTemplates with the addition of
1489// the ability to pass a context and additional request options.
1490//
1491// See ListJobTemplates for details on how to use this API operation.
1492//
1493// The context must be non-nil and will be used for request cancellation. If
1494// the context is nil a panic will occur. In the future the SDK may create
1495// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1496// for more information on using Contexts.
1497func (c *MediaConvert) ListJobTemplatesWithContext(ctx aws.Context, input *ListJobTemplatesInput, opts ...request.Option) (*ListJobTemplatesOutput, error) {
1498	req, out := c.ListJobTemplatesRequest(input)
1499	req.SetContext(ctx)
1500	req.ApplyOptions(opts...)
1501	return out, req.Send()
1502}
1503
1504// ListJobTemplatesPages iterates over the pages of a ListJobTemplates operation,
1505// calling the "fn" function with the response data for each page. To stop
1506// iterating, return false from the fn function.
1507//
1508// See ListJobTemplates method for more information on how to use this operation.
1509//
1510// Note: This operation can generate multiple requests to a service.
1511//
//    // Example iterating over at most 3 pages of a ListJobTemplates operation.
//    pageNum := 0
//    err := client.ListJobTemplatesPages(params,
//        func(page *mediaconvert.ListJobTemplatesOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum < 3
1519//        })
1520//
1521func (c *MediaConvert) ListJobTemplatesPages(input *ListJobTemplatesInput, fn func(*ListJobTemplatesOutput, bool) bool) error {
1522	return c.ListJobTemplatesPagesWithContext(aws.BackgroundContext(), input, fn)
1523}
1524
// ListJobTemplatesPagesWithContext is the same as ListJobTemplatesPages except
1526// it takes a Context and allows setting request options on the pages.
1527//
1528// The context must be non-nil and will be used for request cancellation. If
1529// the context is nil a panic will occur. In the future the SDK may create
1530// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1531// for more information on using Contexts.
1532func (c *MediaConvert) ListJobTemplatesPagesWithContext(ctx aws.Context, input *ListJobTemplatesInput, fn func(*ListJobTemplatesOutput, bool) bool, opts ...request.Option) error {
1533	p := request.Pagination{
1534		NewRequest: func() (*request.Request, error) {
1535			var inCpy *ListJobTemplatesInput
1536			if input != nil {
1537				tmp := *input
1538				inCpy = &tmp
1539			}
1540			req, _ := c.ListJobTemplatesRequest(inCpy)
1541			req.SetContext(ctx)
1542			req.ApplyOptions(opts...)
1543			return req, nil
1544		},
1545	}
1546
1547	for p.Next() {
1548		if !fn(p.Page().(*ListJobTemplatesOutput), !p.HasNextPage()) {
1549			break
1550		}
1551	}
1552
1553	return p.Err()
1554}
1555
1556const opListJobs = "ListJobs"
1557
1558// ListJobsRequest generates a "aws/request.Request" representing the
1559// client's request for the ListJobs operation. The "output" return
1560// value will be populated with the request's response once the request completes
1561// successfully.
1562//
1563// Use "Send" method on the returned Request to send the API call to the service.
1564// the "output" return value is not valid until after Send returns without error.
1565//
1566// See ListJobs for more information on using the ListJobs
1567// API call, and error handling.
1568//
1569// This method is useful when you want to inject custom logic or configuration
1570// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1571//
1572//
1573//    // Example sending a request using the ListJobsRequest method.
1574//    req, resp := client.ListJobsRequest(params)
1575//
1576//    err := req.Send()
1577//    if err == nil { // resp is now filled
1578//        fmt.Println(resp)
1579//    }
1580//
1581// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListJobs
1582func (c *MediaConvert) ListJobsRequest(input *ListJobsInput) (req *request.Request, output *ListJobsOutput) {
1583	op := &request.Operation{
1584		Name:       opListJobs,
1585		HTTPMethod: "GET",
1586		HTTPPath:   "/2017-08-29/jobs",
1587		Paginator: &request.Paginator{
1588			InputTokens:     []string{"NextToken"},
1589			OutputTokens:    []string{"NextToken"},
1590			LimitToken:      "MaxResults",
1591			TruncationToken: "",
1592		},
1593	}
1594
1595	if input == nil {
1596		input = &ListJobsInput{}
1597	}
1598
1599	output = &ListJobsOutput{}
1600	req = c.newRequest(op, input, output)
1601	return
1602}
1603
1604// ListJobs API operation for AWS Elemental MediaConvert.
1605//
// Retrieve a JSON array of up to twenty of your most recently created jobs.
// This array includes in-process, completed, and errored jobs. This will return
// the jobs themselves, not just a list of the jobs. To retrieve the next twenty
// most recent jobs, use the nextToken string returned with the array.
1610//
1611// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1612// with awserr.Error's Code and Message methods to get detailed information about
1613// the error.
1614//
1615// See the AWS API reference guide for AWS Elemental MediaConvert's
1616// API operation ListJobs for usage and error information.
1617//
1618// Returned Error Types:
1619//   * BadRequestException
1620//
1621//   * InternalServerErrorException
1622//
1623//   * ForbiddenException
1624//
1625//   * NotFoundException
1626//
1627//   * TooManyRequestsException
1628//
1629//   * ConflictException
1630//
1631// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListJobs
1632func (c *MediaConvert) ListJobs(input *ListJobsInput) (*ListJobsOutput, error) {
1633	req, out := c.ListJobsRequest(input)
1634	return out, req.Send()
1635}
1636
1637// ListJobsWithContext is the same as ListJobs with the addition of
1638// the ability to pass a context and additional request options.
1639//
1640// See ListJobs for details on how to use this API operation.
1641//
1642// The context must be non-nil and will be used for request cancellation. If
1643// the context is nil a panic will occur. In the future the SDK may create
1644// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1645// for more information on using Contexts.
1646func (c *MediaConvert) ListJobsWithContext(ctx aws.Context, input *ListJobsInput, opts ...request.Option) (*ListJobsOutput, error) {
1647	req, out := c.ListJobsRequest(input)
1648	req.SetContext(ctx)
1649	req.ApplyOptions(opts...)
1650	return out, req.Send()
1651}
1652
1653// ListJobsPages iterates over the pages of a ListJobs operation,
1654// calling the "fn" function with the response data for each page. To stop
1655// iterating, return false from the fn function.
1656//
1657// See ListJobs method for more information on how to use this operation.
1658//
1659// Note: This operation can generate multiple requests to a service.
1660//
//    // Example iterating over at most 3 pages of a ListJobs operation.
//    pageNum := 0
//    err := client.ListJobsPages(params,
//        func(page *mediaconvert.ListJobsOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum < 3
1668//        })
1669//
1670func (c *MediaConvert) ListJobsPages(input *ListJobsInput, fn func(*ListJobsOutput, bool) bool) error {
1671	return c.ListJobsPagesWithContext(aws.BackgroundContext(), input, fn)
1672}
1673
// ListJobsPagesWithContext is the same as ListJobsPages except
1675// it takes a Context and allows setting request options on the pages.
1676//
1677// The context must be non-nil and will be used for request cancellation. If
1678// the context is nil a panic will occur. In the future the SDK may create
1679// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1680// for more information on using Contexts.
1681func (c *MediaConvert) ListJobsPagesWithContext(ctx aws.Context, input *ListJobsInput, fn func(*ListJobsOutput, bool) bool, opts ...request.Option) error {
1682	p := request.Pagination{
1683		NewRequest: func() (*request.Request, error) {
1684			var inCpy *ListJobsInput
1685			if input != nil {
1686				tmp := *input
1687				inCpy = &tmp
1688			}
1689			req, _ := c.ListJobsRequest(inCpy)
1690			req.SetContext(ctx)
1691			req.ApplyOptions(opts...)
1692			return req, nil
1693		},
1694	}
1695
1696	for p.Next() {
1697		if !fn(p.Page().(*ListJobsOutput), !p.HasNextPage()) {
1698			break
1699		}
1700	}
1701
1702	return p.Err()
1703}
1704
1705const opListPresets = "ListPresets"
1706
1707// ListPresetsRequest generates a "aws/request.Request" representing the
1708// client's request for the ListPresets operation. The "output" return
1709// value will be populated with the request's response once the request completes
1710// successfully.
1711//
1712// Use "Send" method on the returned Request to send the API call to the service.
1713// the "output" return value is not valid until after Send returns without error.
1714//
1715// See ListPresets for more information on using the ListPresets
1716// API call, and error handling.
1717//
1718// This method is useful when you want to inject custom logic or configuration
1719// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1720//
1721//
1722//    // Example sending a request using the ListPresetsRequest method.
1723//    req, resp := client.ListPresetsRequest(params)
1724//
1725//    err := req.Send()
1726//    if err == nil { // resp is now filled
1727//        fmt.Println(resp)
1728//    }
1729//
1730// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListPresets
1731func (c *MediaConvert) ListPresetsRequest(input *ListPresetsInput) (req *request.Request, output *ListPresetsOutput) {
1732	op := &request.Operation{
1733		Name:       opListPresets,
1734		HTTPMethod: "GET",
1735		HTTPPath:   "/2017-08-29/presets",
1736		Paginator: &request.Paginator{
1737			InputTokens:     []string{"NextToken"},
1738			OutputTokens:    []string{"NextToken"},
1739			LimitToken:      "MaxResults",
1740			TruncationToken: "",
1741		},
1742	}
1743
1744	if input == nil {
1745		input = &ListPresetsInput{}
1746	}
1747
1748	output = &ListPresetsOutput{}
1749	req = c.newRequest(op, input, output)
1750	return
1751}
1752
1753// ListPresets API operation for AWS Elemental MediaConvert.
1754//
1755// Retrieve a JSON array of up to twenty of your presets. This will return the
1756// presets themselves, not just a list of them. To retrieve the next twenty
1757// presets, use the nextToken string returned with the array.
1758//
1759// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1760// with awserr.Error's Code and Message methods to get detailed information about
1761// the error.
1762//
1763// See the AWS API reference guide for AWS Elemental MediaConvert's
1764// API operation ListPresets for usage and error information.
1765//
1766// Returned Error Types:
1767//   * BadRequestException
1768//
1769//   * InternalServerErrorException
1770//
1771//   * ForbiddenException
1772//
1773//   * NotFoundException
1774//
1775//   * TooManyRequestsException
1776//
1777//   * ConflictException
1778//
1779// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListPresets
1780func (c *MediaConvert) ListPresets(input *ListPresetsInput) (*ListPresetsOutput, error) {
1781	req, out := c.ListPresetsRequest(input)
1782	return out, req.Send()
1783}
1784
1785// ListPresetsWithContext is the same as ListPresets with the addition of
1786// the ability to pass a context and additional request options.
1787//
1788// See ListPresets for details on how to use this API operation.
1789//
1790// The context must be non-nil and will be used for request cancellation. If
1791// the context is nil a panic will occur. In the future the SDK may create
1792// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1793// for more information on using Contexts.
1794func (c *MediaConvert) ListPresetsWithContext(ctx aws.Context, input *ListPresetsInput, opts ...request.Option) (*ListPresetsOutput, error) {
1795	req, out := c.ListPresetsRequest(input)
1796	req.SetContext(ctx)
1797	req.ApplyOptions(opts...)
1798	return out, req.Send()
1799}
1800
1801// ListPresetsPages iterates over the pages of a ListPresets operation,
1802// calling the "fn" function with the response data for each page. To stop
1803// iterating, return false from the fn function.
1804//
1805// See ListPresets method for more information on how to use this operation.
1806//
1807// Note: This operation can generate multiple requests to a service.
1808//
//    // Example iterating over at most 3 pages of a ListPresets operation.
//    pageNum := 0
//    err := client.ListPresetsPages(params,
//        func(page *mediaconvert.ListPresetsOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum < 3
1816//        })
1817//
1818func (c *MediaConvert) ListPresetsPages(input *ListPresetsInput, fn func(*ListPresetsOutput, bool) bool) error {
1819	return c.ListPresetsPagesWithContext(aws.BackgroundContext(), input, fn)
1820}
1821
// ListPresetsPagesWithContext is the same as ListPresetsPages except
1823// it takes a Context and allows setting request options on the pages.
1824//
1825// The context must be non-nil and will be used for request cancellation. If
1826// the context is nil a panic will occur. In the future the SDK may create
1827// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1828// for more information on using Contexts.
1829func (c *MediaConvert) ListPresetsPagesWithContext(ctx aws.Context, input *ListPresetsInput, fn func(*ListPresetsOutput, bool) bool, opts ...request.Option) error {
1830	p := request.Pagination{
1831		NewRequest: func() (*request.Request, error) {
1832			var inCpy *ListPresetsInput
1833			if input != nil {
1834				tmp := *input
1835				inCpy = &tmp
1836			}
1837			req, _ := c.ListPresetsRequest(inCpy)
1838			req.SetContext(ctx)
1839			req.ApplyOptions(opts...)
1840			return req, nil
1841		},
1842	}
1843
1844	for p.Next() {
1845		if !fn(p.Page().(*ListPresetsOutput), !p.HasNextPage()) {
1846			break
1847		}
1848	}
1849
1850	return p.Err()
1851}
1852
1853const opListQueues = "ListQueues"
1854
1855// ListQueuesRequest generates a "aws/request.Request" representing the
1856// client's request for the ListQueues operation. The "output" return
1857// value will be populated with the request's response once the request completes
1858// successfully.
1859//
1860// Use "Send" method on the returned Request to send the API call to the service.
1861// the "output" return value is not valid until after Send returns without error.
1862//
1863// See ListQueues for more information on using the ListQueues
1864// API call, and error handling.
1865//
1866// This method is useful when you want to inject custom logic or configuration
1867// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1868//
1869//
1870//    // Example sending a request using the ListQueuesRequest method.
1871//    req, resp := client.ListQueuesRequest(params)
1872//
1873//    err := req.Send()
1874//    if err == nil { // resp is now filled
1875//        fmt.Println(resp)
1876//    }
1877//
1878// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListQueues
1879func (c *MediaConvert) ListQueuesRequest(input *ListQueuesInput) (req *request.Request, output *ListQueuesOutput) {
1880	op := &request.Operation{
1881		Name:       opListQueues,
1882		HTTPMethod: "GET",
1883		HTTPPath:   "/2017-08-29/queues",
1884		Paginator: &request.Paginator{
1885			InputTokens:     []string{"NextToken"},
1886			OutputTokens:    []string{"NextToken"},
1887			LimitToken:      "MaxResults",
1888			TruncationToken: "",
1889		},
1890	}
1891
1892	if input == nil {
1893		input = &ListQueuesInput{}
1894	}
1895
1896	output = &ListQueuesOutput{}
1897	req = c.newRequest(op, input, output)
1898	return
1899}
1900
1901// ListQueues API operation for AWS Elemental MediaConvert.
1902//
1903// Retrieve a JSON array of up to twenty of your queues. This will return the
1904// queues themselves, not just a list of them. To retrieve the next twenty queues,
1905// use the nextToken string returned with the array.
1906//
1907// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1908// with awserr.Error's Code and Message methods to get detailed information about
1909// the error.
1910//
1911// See the AWS API reference guide for AWS Elemental MediaConvert's
1912// API operation ListQueues for usage and error information.
1913//
1914// Returned Error Types:
1915//   * BadRequestException
1916//
1917//   * InternalServerErrorException
1918//
1919//   * ForbiddenException
1920//
1921//   * NotFoundException
1922//
1923//   * TooManyRequestsException
1924//
1925//   * ConflictException
1926//
1927// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListQueues
1928func (c *MediaConvert) ListQueues(input *ListQueuesInput) (*ListQueuesOutput, error) {
1929	req, out := c.ListQueuesRequest(input)
1930	return out, req.Send()
1931}
1932
1933// ListQueuesWithContext is the same as ListQueues with the addition of
1934// the ability to pass a context and additional request options.
1935//
1936// See ListQueues for details on how to use this API operation.
1937//
1938// The context must be non-nil and will be used for request cancellation. If
1939// the context is nil a panic will occur. In the future the SDK may create
1940// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1941// for more information on using Contexts.
1942func (c *MediaConvert) ListQueuesWithContext(ctx aws.Context, input *ListQueuesInput, opts ...request.Option) (*ListQueuesOutput, error) {
1943	req, out := c.ListQueuesRequest(input)
1944	req.SetContext(ctx)
1945	req.ApplyOptions(opts...)
1946	return out, req.Send()
1947}
1948
1949// ListQueuesPages iterates over the pages of a ListQueues operation,
1950// calling the "fn" function with the response data for each page. To stop
1951// iterating, return false from the fn function.
1952//
1953// See ListQueues method for more information on how to use this operation.
1954//
1955// Note: This operation can generate multiple requests to a service.
1956//
//    // Example iterating over at most 3 pages of a ListQueues operation.
//    pageNum := 0
//    err := client.ListQueuesPages(params,
//        func(page *mediaconvert.ListQueuesOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum < 3
1964//        })
1965//
1966func (c *MediaConvert) ListQueuesPages(input *ListQueuesInput, fn func(*ListQueuesOutput, bool) bool) error {
1967	return c.ListQueuesPagesWithContext(aws.BackgroundContext(), input, fn)
1968}
1969
// ListQueuesPagesWithContext is the same as ListQueuesPages except
1971// it takes a Context and allows setting request options on the pages.
1972//
1973// The context must be non-nil and will be used for request cancellation. If
1974// the context is nil a panic will occur. In the future the SDK may create
1975// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1976// for more information on using Contexts.
1977func (c *MediaConvert) ListQueuesPagesWithContext(ctx aws.Context, input *ListQueuesInput, fn func(*ListQueuesOutput, bool) bool, opts ...request.Option) error {
1978	p := request.Pagination{
1979		NewRequest: func() (*request.Request, error) {
1980			var inCpy *ListQueuesInput
1981			if input != nil {
1982				tmp := *input
1983				inCpy = &tmp
1984			}
1985			req, _ := c.ListQueuesRequest(inCpy)
1986			req.SetContext(ctx)
1987			req.ApplyOptions(opts...)
1988			return req, nil
1989		},
1990	}
1991
1992	for p.Next() {
1993		if !fn(p.Page().(*ListQueuesOutput), !p.HasNextPage()) {
1994			break
1995		}
1996	}
1997
1998	return p.Err()
1999}
2000
2001const opListTagsForResource = "ListTagsForResource"
2002
2003// ListTagsForResourceRequest generates a "aws/request.Request" representing the
2004// client's request for the ListTagsForResource operation. The "output" return
2005// value will be populated with the request's response once the request completes
2006// successfully.
2007//
2008// Use "Send" method on the returned Request to send the API call to the service.
2009// the "output" return value is not valid until after Send returns without error.
2010//
2011// See ListTagsForResource for more information on using the ListTagsForResource
2012// API call, and error handling.
2013//
2014// This method is useful when you want to inject custom logic or configuration
2015// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2016//
2017//
2018//    // Example sending a request using the ListTagsForResourceRequest method.
2019//    req, resp := client.ListTagsForResourceRequest(params)
2020//
2021//    err := req.Send()
2022//    if err == nil { // resp is now filled
2023//        fmt.Println(resp)
2024//    }
2025//
2026// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListTagsForResource
2027func (c *MediaConvert) ListTagsForResourceRequest(input *ListTagsForResourceInput) (req *request.Request, output *ListTagsForResourceOutput) {
2028	op := &request.Operation{
2029		Name:       opListTagsForResource,
2030		HTTPMethod: "GET",
2031		HTTPPath:   "/2017-08-29/tags/{arn}",
2032	}
2033
2034	if input == nil {
2035		input = &ListTagsForResourceInput{}
2036	}
2037
2038	output = &ListTagsForResourceOutput{}
2039	req = c.newRequest(op, input, output)
2040	return
2041}
2042
2043// ListTagsForResource API operation for AWS Elemental MediaConvert.
2044//
2045// Retrieve the tags for a MediaConvert resource.
2046//
2047// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2048// with awserr.Error's Code and Message methods to get detailed information about
2049// the error.
2050//
2051// See the AWS API reference guide for AWS Elemental MediaConvert's
2052// API operation ListTagsForResource for usage and error information.
2053//
2054// Returned Error Types:
2055//   * BadRequestException
2056//
2057//   * InternalServerErrorException
2058//
2059//   * ForbiddenException
2060//
2061//   * NotFoundException
2062//
2063//   * TooManyRequestsException
2064//
2065//   * ConflictException
2066//
2067// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/ListTagsForResource
2068func (c *MediaConvert) ListTagsForResource(input *ListTagsForResourceInput) (*ListTagsForResourceOutput, error) {
2069	req, out := c.ListTagsForResourceRequest(input)
2070	return out, req.Send()
2071}
2072
2073// ListTagsForResourceWithContext is the same as ListTagsForResource with the addition of
2074// the ability to pass a context and additional request options.
2075//
2076// See ListTagsForResource for details on how to use this API operation.
2077//
2078// The context must be non-nil and will be used for request cancellation. If
2079// the context is nil a panic will occur. In the future the SDK may create
2080// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2081// for more information on using Contexts.
2082func (c *MediaConvert) ListTagsForResourceWithContext(ctx aws.Context, input *ListTagsForResourceInput, opts ...request.Option) (*ListTagsForResourceOutput, error) {
2083	req, out := c.ListTagsForResourceRequest(input)
2084	req.SetContext(ctx)
2085	req.ApplyOptions(opts...)
2086	return out, req.Send()
2087}
2088
2089const opTagResource = "TagResource"
2090
2091// TagResourceRequest generates a "aws/request.Request" representing the
2092// client's request for the TagResource operation. The "output" return
2093// value will be populated with the request's response once the request completes
2094// successfully.
2095//
2096// Use "Send" method on the returned Request to send the API call to the service.
2097// the "output" return value is not valid until after Send returns without error.
2098//
2099// See TagResource for more information on using the TagResource
2100// API call, and error handling.
2101//
2102// This method is useful when you want to inject custom logic or configuration
2103// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2104//
2105//
2106//    // Example sending a request using the TagResourceRequest method.
2107//    req, resp := client.TagResourceRequest(params)
2108//
2109//    err := req.Send()
2110//    if err == nil { // resp is now filled
2111//        fmt.Println(resp)
2112//    }
2113//
2114// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/TagResource
2115func (c *MediaConvert) TagResourceRequest(input *TagResourceInput) (req *request.Request, output *TagResourceOutput) {
2116	op := &request.Operation{
2117		Name:       opTagResource,
2118		HTTPMethod: "POST",
2119		HTTPPath:   "/2017-08-29/tags",
2120	}
2121
2122	if input == nil {
2123		input = &TagResourceInput{}
2124	}
2125
2126	output = &TagResourceOutput{}
2127	req = c.newRequest(op, input, output)
2128	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
2129	return
2130}
2131
2132// TagResource API operation for AWS Elemental MediaConvert.
2133//
2134// Add tags to a MediaConvert queue, preset, or job template. For information
2135// about tagging, see the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/tagging-resources.html
2136//
2137// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2138// with awserr.Error's Code and Message methods to get detailed information about
2139// the error.
2140//
2141// See the AWS API reference guide for AWS Elemental MediaConvert's
2142// API operation TagResource for usage and error information.
2143//
2144// Returned Error Types:
2145//   * BadRequestException
2146//
2147//   * InternalServerErrorException
2148//
2149//   * ForbiddenException
2150//
2151//   * NotFoundException
2152//
2153//   * TooManyRequestsException
2154//
2155//   * ConflictException
2156//
2157// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/TagResource
2158func (c *MediaConvert) TagResource(input *TagResourceInput) (*TagResourceOutput, error) {
2159	req, out := c.TagResourceRequest(input)
2160	return out, req.Send()
2161}
2162
2163// TagResourceWithContext is the same as TagResource with the addition of
2164// the ability to pass a context and additional request options.
2165//
2166// See TagResource for details on how to use this API operation.
2167//
2168// The context must be non-nil and will be used for request cancellation. If
2169// the context is nil a panic will occur. In the future the SDK may create
2170// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2171// for more information on using Contexts.
2172func (c *MediaConvert) TagResourceWithContext(ctx aws.Context, input *TagResourceInput, opts ...request.Option) (*TagResourceOutput, error) {
2173	req, out := c.TagResourceRequest(input)
2174	req.SetContext(ctx)
2175	req.ApplyOptions(opts...)
2176	return out, req.Send()
2177}
2178
2179const opUntagResource = "UntagResource"
2180
2181// UntagResourceRequest generates a "aws/request.Request" representing the
2182// client's request for the UntagResource operation. The "output" return
2183// value will be populated with the request's response once the request completes
2184// successfully.
2185//
2186// Use "Send" method on the returned Request to send the API call to the service.
2187// the "output" return value is not valid until after Send returns without error.
2188//
2189// See UntagResource for more information on using the UntagResource
2190// API call, and error handling.
2191//
2192// This method is useful when you want to inject custom logic or configuration
2193// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2194//
2195//
2196//    // Example sending a request using the UntagResourceRequest method.
2197//    req, resp := client.UntagResourceRequest(params)
2198//
2199//    err := req.Send()
2200//    if err == nil { // resp is now filled
2201//        fmt.Println(resp)
2202//    }
2203//
2204// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UntagResource
2205func (c *MediaConvert) UntagResourceRequest(input *UntagResourceInput) (req *request.Request, output *UntagResourceOutput) {
2206	op := &request.Operation{
2207		Name:       opUntagResource,
2208		HTTPMethod: "PUT",
2209		HTTPPath:   "/2017-08-29/tags/{arn}",
2210	}
2211
2212	if input == nil {
2213		input = &UntagResourceInput{}
2214	}
2215
2216	output = &UntagResourceOutput{}
2217	req = c.newRequest(op, input, output)
2218	req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
2219	return
2220}
2221
2222// UntagResource API operation for AWS Elemental MediaConvert.
2223//
2224// Remove tags from a MediaConvert queue, preset, or job template. For information
2225// about tagging, see the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/tagging-resources.html
2226//
2227// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2228// with awserr.Error's Code and Message methods to get detailed information about
2229// the error.
2230//
2231// See the AWS API reference guide for AWS Elemental MediaConvert's
2232// API operation UntagResource for usage and error information.
2233//
2234// Returned Error Types:
2235//   * BadRequestException
2236//
2237//   * InternalServerErrorException
2238//
2239//   * ForbiddenException
2240//
2241//   * NotFoundException
2242//
2243//   * TooManyRequestsException
2244//
2245//   * ConflictException
2246//
2247// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UntagResource
2248func (c *MediaConvert) UntagResource(input *UntagResourceInput) (*UntagResourceOutput, error) {
2249	req, out := c.UntagResourceRequest(input)
2250	return out, req.Send()
2251}
2252
2253// UntagResourceWithContext is the same as UntagResource with the addition of
2254// the ability to pass a context and additional request options.
2255//
2256// See UntagResource for details on how to use this API operation.
2257//
2258// The context must be non-nil and will be used for request cancellation. If
2259// the context is nil a panic will occur. In the future the SDK may create
2260// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2261// for more information on using Contexts.
2262func (c *MediaConvert) UntagResourceWithContext(ctx aws.Context, input *UntagResourceInput, opts ...request.Option) (*UntagResourceOutput, error) {
2263	req, out := c.UntagResourceRequest(input)
2264	req.SetContext(ctx)
2265	req.ApplyOptions(opts...)
2266	return out, req.Send()
2267}
2268
2269const opUpdateJobTemplate = "UpdateJobTemplate"
2270
2271// UpdateJobTemplateRequest generates a "aws/request.Request" representing the
2272// client's request for the UpdateJobTemplate operation. The "output" return
2273// value will be populated with the request's response once the request completes
2274// successfully.
2275//
2276// Use "Send" method on the returned Request to send the API call to the service.
2277// the "output" return value is not valid until after Send returns without error.
2278//
2279// See UpdateJobTemplate for more information on using the UpdateJobTemplate
2280// API call, and error handling.
2281//
2282// This method is useful when you want to inject custom logic or configuration
2283// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2284//
2285//
2286//    // Example sending a request using the UpdateJobTemplateRequest method.
2287//    req, resp := client.UpdateJobTemplateRequest(params)
2288//
2289//    err := req.Send()
2290//    if err == nil { // resp is now filled
2291//        fmt.Println(resp)
2292//    }
2293//
2294// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdateJobTemplate
2295func (c *MediaConvert) UpdateJobTemplateRequest(input *UpdateJobTemplateInput) (req *request.Request, output *UpdateJobTemplateOutput) {
2296	op := &request.Operation{
2297		Name:       opUpdateJobTemplate,
2298		HTTPMethod: "PUT",
2299		HTTPPath:   "/2017-08-29/jobTemplates/{name}",
2300	}
2301
2302	if input == nil {
2303		input = &UpdateJobTemplateInput{}
2304	}
2305
2306	output = &UpdateJobTemplateOutput{}
2307	req = c.newRequest(op, input, output)
2308	return
2309}
2310
2311// UpdateJobTemplate API operation for AWS Elemental MediaConvert.
2312//
2313// Modify one of your existing job templates.
2314//
2315// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2316// with awserr.Error's Code and Message methods to get detailed information about
2317// the error.
2318//
2319// See the AWS API reference guide for AWS Elemental MediaConvert's
2320// API operation UpdateJobTemplate for usage and error information.
2321//
2322// Returned Error Types:
2323//   * BadRequestException
2324//
2325//   * InternalServerErrorException
2326//
2327//   * ForbiddenException
2328//
2329//   * NotFoundException
2330//
2331//   * TooManyRequestsException
2332//
2333//   * ConflictException
2334//
2335// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdateJobTemplate
2336func (c *MediaConvert) UpdateJobTemplate(input *UpdateJobTemplateInput) (*UpdateJobTemplateOutput, error) {
2337	req, out := c.UpdateJobTemplateRequest(input)
2338	return out, req.Send()
2339}
2340
2341// UpdateJobTemplateWithContext is the same as UpdateJobTemplate with the addition of
2342// the ability to pass a context and additional request options.
2343//
2344// See UpdateJobTemplate for details on how to use this API operation.
2345//
2346// The context must be non-nil and will be used for request cancellation. If
2347// the context is nil a panic will occur. In the future the SDK may create
2348// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2349// for more information on using Contexts.
2350func (c *MediaConvert) UpdateJobTemplateWithContext(ctx aws.Context, input *UpdateJobTemplateInput, opts ...request.Option) (*UpdateJobTemplateOutput, error) {
2351	req, out := c.UpdateJobTemplateRequest(input)
2352	req.SetContext(ctx)
2353	req.ApplyOptions(opts...)
2354	return out, req.Send()
2355}
2356
2357const opUpdatePreset = "UpdatePreset"
2358
2359// UpdatePresetRequest generates a "aws/request.Request" representing the
2360// client's request for the UpdatePreset operation. The "output" return
2361// value will be populated with the request's response once the request completes
2362// successfully.
2363//
2364// Use "Send" method on the returned Request to send the API call to the service.
2365// the "output" return value is not valid until after Send returns without error.
2366//
2367// See UpdatePreset for more information on using the UpdatePreset
2368// API call, and error handling.
2369//
2370// This method is useful when you want to inject custom logic or configuration
2371// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2372//
2373//
2374//    // Example sending a request using the UpdatePresetRequest method.
2375//    req, resp := client.UpdatePresetRequest(params)
2376//
2377//    err := req.Send()
2378//    if err == nil { // resp is now filled
2379//        fmt.Println(resp)
2380//    }
2381//
2382// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdatePreset
2383func (c *MediaConvert) UpdatePresetRequest(input *UpdatePresetInput) (req *request.Request, output *UpdatePresetOutput) {
2384	op := &request.Operation{
2385		Name:       opUpdatePreset,
2386		HTTPMethod: "PUT",
2387		HTTPPath:   "/2017-08-29/presets/{name}",
2388	}
2389
2390	if input == nil {
2391		input = &UpdatePresetInput{}
2392	}
2393
2394	output = &UpdatePresetOutput{}
2395	req = c.newRequest(op, input, output)
2396	return
2397}
2398
2399// UpdatePreset API operation for AWS Elemental MediaConvert.
2400//
2401// Modify one of your existing presets.
2402//
2403// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2404// with awserr.Error's Code and Message methods to get detailed information about
2405// the error.
2406//
2407// See the AWS API reference guide for AWS Elemental MediaConvert's
2408// API operation UpdatePreset for usage and error information.
2409//
2410// Returned Error Types:
2411//   * BadRequestException
2412//
2413//   * InternalServerErrorException
2414//
2415//   * ForbiddenException
2416//
2417//   * NotFoundException
2418//
2419//   * TooManyRequestsException
2420//
2421//   * ConflictException
2422//
2423// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdatePreset
2424func (c *MediaConvert) UpdatePreset(input *UpdatePresetInput) (*UpdatePresetOutput, error) {
2425	req, out := c.UpdatePresetRequest(input)
2426	return out, req.Send()
2427}
2428
2429// UpdatePresetWithContext is the same as UpdatePreset with the addition of
2430// the ability to pass a context and additional request options.
2431//
2432// See UpdatePreset for details on how to use this API operation.
2433//
2434// The context must be non-nil and will be used for request cancellation. If
2435// the context is nil a panic will occur. In the future the SDK may create
2436// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2437// for more information on using Contexts.
2438func (c *MediaConvert) UpdatePresetWithContext(ctx aws.Context, input *UpdatePresetInput, opts ...request.Option) (*UpdatePresetOutput, error) {
2439	req, out := c.UpdatePresetRequest(input)
2440	req.SetContext(ctx)
2441	req.ApplyOptions(opts...)
2442	return out, req.Send()
2443}
2444
2445const opUpdateQueue = "UpdateQueue"
2446
2447// UpdateQueueRequest generates a "aws/request.Request" representing the
2448// client's request for the UpdateQueue operation. The "output" return
2449// value will be populated with the request's response once the request completes
2450// successfully.
2451//
2452// Use "Send" method on the returned Request to send the API call to the service.
2453// the "output" return value is not valid until after Send returns without error.
2454//
2455// See UpdateQueue for more information on using the UpdateQueue
2456// API call, and error handling.
2457//
2458// This method is useful when you want to inject custom logic or configuration
2459// into the SDK's request lifecycle. Such as custom headers, or retry logic.
2460//
2461//
2462//    // Example sending a request using the UpdateQueueRequest method.
2463//    req, resp := client.UpdateQueueRequest(params)
2464//
2465//    err := req.Send()
2466//    if err == nil { // resp is now filled
2467//        fmt.Println(resp)
2468//    }
2469//
2470// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdateQueue
2471func (c *MediaConvert) UpdateQueueRequest(input *UpdateQueueInput) (req *request.Request, output *UpdateQueueOutput) {
2472	op := &request.Operation{
2473		Name:       opUpdateQueue,
2474		HTTPMethod: "PUT",
2475		HTTPPath:   "/2017-08-29/queues/{name}",
2476	}
2477
2478	if input == nil {
2479		input = &UpdateQueueInput{}
2480	}
2481
2482	output = &UpdateQueueOutput{}
2483	req = c.newRequest(op, input, output)
2484	return
2485}
2486
2487// UpdateQueue API operation for AWS Elemental MediaConvert.
2488//
2489// Modify one of your existing queues.
2490//
2491// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
2492// with awserr.Error's Code and Message methods to get detailed information about
2493// the error.
2494//
2495// See the AWS API reference guide for AWS Elemental MediaConvert's
2496// API operation UpdateQueue for usage and error information.
2497//
2498// Returned Error Types:
2499//   * BadRequestException
2500//
2501//   * InternalServerErrorException
2502//
2503//   * ForbiddenException
2504//
2505//   * NotFoundException
2506//
2507//   * TooManyRequestsException
2508//
2509//   * ConflictException
2510//
2511// See also, https://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/UpdateQueue
2512func (c *MediaConvert) UpdateQueue(input *UpdateQueueInput) (*UpdateQueueOutput, error) {
2513	req, out := c.UpdateQueueRequest(input)
2514	return out, req.Send()
2515}
2516
2517// UpdateQueueWithContext is the same as UpdateQueue with the addition of
2518// the ability to pass a context and additional request options.
2519//
2520// See UpdateQueue for details on how to use this API operation.
2521//
2522// The context must be non-nil and will be used for request cancellation. If
2523// the context is nil a panic will occur. In the future the SDK may create
2524// sub-contexts for http.Requests. See https://golang.org/pkg/context/
2525// for more information on using Contexts.
2526func (c *MediaConvert) UpdateQueueWithContext(ctx aws.Context, input *UpdateQueueInput, opts ...request.Option) (*UpdateQueueOutput, error) {
2527	req, out := c.UpdateQueueRequest(input)
2528	req.SetContext(ctx)
2529	req.ApplyOptions(opts...)
2530	return out, req.Send()
2531}
2532
2533// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
2534// the value AAC. The service accepts one of two mutually exclusive groups of
2535// AAC settings--VBR and CBR. To select one of these modes, set the value of
2536// Bitrate control mode (rateControlMode) to "VBR" or "CBR". In VBR mode, you
2537// control the audio quality with the setting VBR quality (vbrQuality). In CBR
2538// mode, you use the setting Bitrate (bitrate). Defaults and valid values depend
2539// on the rate control mode.
2540type AacSettings struct {
2541	_ struct{} `type:"structure"`
2542
2543	// Choose BROADCASTER_MIXED_AD when the input contains pre-mixed main audio
2544	// + audio description (AD) as a stereo pair. The value for AudioType will be
2545	// set to 3, which signals to downstream systems that this stream contains "broadcaster
2546	// mixed AD". Note that the input received by the encoder must contain pre-mixed
2547	// audio; the encoder does not perform the mixing. When you choose BROADCASTER_MIXED_AD,
2548	// the encoder ignores any values you provide in AudioType and FollowInputAudioType.
2549	// Choose NORMAL when the input does not contain pre-mixed audio + audio description
2550	// (AD). In this case, the encoder will use any values you provide for AudioType
2551	// and FollowInputAudioType.
2552	AudioDescriptionBroadcasterMix *string `locationName:"audioDescriptionBroadcasterMix" type:"string" enum:"AacAudioDescriptionBroadcasterMix"`
2553
2554	// Specify the average bitrate in bits per second. The set of valid values for
2555	// this setting is: 6000, 8000, 10000, 12000, 14000, 16000, 20000, 24000, 28000,
2556	// 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000,
2557	// 192000, 224000, 256000, 288000, 320000, 384000, 448000, 512000, 576000, 640000,
2558	// 768000, 896000, 1024000. The value you set is also constrained by the values
2559	// that you choose for Profile (codecProfile), Bitrate control mode (codingMode),
2560	// and Sample rate (sampleRate). Default values depend on Bitrate control mode
2561	// and Profile.
2562	Bitrate *int64 `locationName:"bitrate" min:"6000" type:"integer"`
2563
2564	// AAC Profile.
2565	CodecProfile *string `locationName:"codecProfile" type:"string" enum:"AacCodecProfile"`
2566
2567	// Mono (Audio Description), Mono, Stereo, or 5.1 channel layout. Valid values
2568	// depend on rate control mode and profile. "1.0 - Audio Description (Receiver
2569	// Mix)" setting receives a stereo description plus control track and emits
2570	// a mono AAC encode of the description track, with control data emitted in
2571	// the PES header as per ETSI TS 101 154 Annex E.
2572	CodingMode *string `locationName:"codingMode" type:"string" enum:"AacCodingMode"`
2573
2574	// Rate Control Mode.
2575	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"AacRateControlMode"`
2576
2577	// Enables LATM/LOAS AAC output. Note that if you use LATM/LOAS AAC in an output,
2578	// you must choose "No container" for the output container.
2579	RawFormat *string `locationName:"rawFormat" type:"string" enum:"AacRawFormat"`
2580
2581	// Sample rate in Hz. Valid values depend on rate control mode and profile.
2582	SampleRate *int64 `locationName:"sampleRate" min:"8000" type:"integer"`
2583
2584	// Use MPEG-2 AAC instead of MPEG-4 AAC audio for raw or MPEG-2 Transport Stream
2585	// containers.
2586	Specification *string `locationName:"specification" type:"string" enum:"AacSpecification"`
2587
	// VBR Quality Level - Only used if rateControlMode is VBR.
2589	VbrQuality *string `locationName:"vbrQuality" type:"string" enum:"AacVbrQuality"`
2590}
2591
2592// String returns the string representation
2593func (s AacSettings) String() string {
2594	return awsutil.Prettify(s)
2595}
2596
2597// GoString returns the string representation
2598func (s AacSettings) GoString() string {
2599	return s.String()
2600}
2601
2602// Validate inspects the fields of the type to determine if they are valid.
2603func (s *AacSettings) Validate() error {
2604	invalidParams := request.ErrInvalidParams{Context: "AacSettings"}
2605	if s.Bitrate != nil && *s.Bitrate < 6000 {
2606		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 6000))
2607	}
2608	if s.SampleRate != nil && *s.SampleRate < 8000 {
2609		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 8000))
2610	}
2611
2612	if invalidParams.Len() > 0 {
2613		return invalidParams
2614	}
2615	return nil
2616}
2617
2618// SetAudioDescriptionBroadcasterMix sets the AudioDescriptionBroadcasterMix field's value.
2619func (s *AacSettings) SetAudioDescriptionBroadcasterMix(v string) *AacSettings {
2620	s.AudioDescriptionBroadcasterMix = &v
2621	return s
2622}
2623
2624// SetBitrate sets the Bitrate field's value.
2625func (s *AacSettings) SetBitrate(v int64) *AacSettings {
2626	s.Bitrate = &v
2627	return s
2628}
2629
2630// SetCodecProfile sets the CodecProfile field's value.
2631func (s *AacSettings) SetCodecProfile(v string) *AacSettings {
2632	s.CodecProfile = &v
2633	return s
2634}
2635
2636// SetCodingMode sets the CodingMode field's value.
2637func (s *AacSettings) SetCodingMode(v string) *AacSettings {
2638	s.CodingMode = &v
2639	return s
2640}
2641
2642// SetRateControlMode sets the RateControlMode field's value.
2643func (s *AacSettings) SetRateControlMode(v string) *AacSettings {
2644	s.RateControlMode = &v
2645	return s
2646}
2647
2648// SetRawFormat sets the RawFormat field's value.
2649func (s *AacSettings) SetRawFormat(v string) *AacSettings {
2650	s.RawFormat = &v
2651	return s
2652}
2653
2654// SetSampleRate sets the SampleRate field's value.
2655func (s *AacSettings) SetSampleRate(v int64) *AacSettings {
2656	s.SampleRate = &v
2657	return s
2658}
2659
2660// SetSpecification sets the Specification field's value.
2661func (s *AacSettings) SetSpecification(v string) *AacSettings {
2662	s.Specification = &v
2663	return s
2664}
2665
2666// SetVbrQuality sets the VbrQuality field's value.
2667func (s *AacSettings) SetVbrQuality(v string) *AacSettings {
2668	s.VbrQuality = &v
2669	return s
2670}
2671
2672// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
2673// the value AC3.
2674type Ac3Settings struct {
2675	_ struct{} `type:"structure"`
2676
2677	// Specify the average bitrate in bits per second. Valid bitrates depend on
2678	// the coding mode.
2679	Bitrate *int64 `locationName:"bitrate" min:"64000" type:"integer"`
2680
2681	// Specify the bitstream mode for the AC-3 stream that the encoder emits. For
2682	// more information about the AC3 bitstream mode, see ATSC A/52-2012 (Annex
2683	// E).
2684	BitstreamMode *string `locationName:"bitstreamMode" type:"string" enum:"Ac3BitstreamMode"`
2685
2686	// Dolby Digital coding mode. Determines number of channels.
2687	CodingMode *string `locationName:"codingMode" type:"string" enum:"Ac3CodingMode"`
2688
2689	// Sets the dialnorm for the output. If blank and input audio is Dolby Digital,
2690	// dialnorm will be passed through.
2691	Dialnorm *int64 `locationName:"dialnorm" min:"1" type:"integer"`
2692
2693	// If set to FILM_STANDARD, adds dynamic range compression signaling to the
2694	// output bitstream as defined in the Dolby Digital specification.
2695	DynamicRangeCompressionProfile *string `locationName:"dynamicRangeCompressionProfile" type:"string" enum:"Ac3DynamicRangeCompressionProfile"`
2696
2697	// Applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only
2698	// valid with 3_2_LFE coding mode.
2699	LfeFilter *string `locationName:"lfeFilter" type:"string" enum:"Ac3LfeFilter"`
2700
2701	// When set to FOLLOW_INPUT, encoder metadata will be sourced from the DD, DD+,
2702	// or DolbyE decoder that supplied this audio data. If audio was not supplied
2703	// from one of these streams, then the static metadata settings will be used.
2704	MetadataControl *string `locationName:"metadataControl" type:"string" enum:"Ac3MetadataControl"`
2705
2706	// This value is always 48000. It represents the sample rate in Hz.
2707	SampleRate *int64 `locationName:"sampleRate" min:"48000" type:"integer"`
2708}
2709
2710// String returns the string representation
2711func (s Ac3Settings) String() string {
2712	return awsutil.Prettify(s)
2713}
2714
2715// GoString returns the string representation
2716func (s Ac3Settings) GoString() string {
2717	return s.String()
2718}
2719
2720// Validate inspects the fields of the type to determine if they are valid.
2721func (s *Ac3Settings) Validate() error {
2722	invalidParams := request.ErrInvalidParams{Context: "Ac3Settings"}
2723	if s.Bitrate != nil && *s.Bitrate < 64000 {
2724		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 64000))
2725	}
2726	if s.Dialnorm != nil && *s.Dialnorm < 1 {
2727		invalidParams.Add(request.NewErrParamMinValue("Dialnorm", 1))
2728	}
2729	if s.SampleRate != nil && *s.SampleRate < 48000 {
2730		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 48000))
2731	}
2732
2733	if invalidParams.Len() > 0 {
2734		return invalidParams
2735	}
2736	return nil
2737}
2738
2739// SetBitrate sets the Bitrate field's value.
2740func (s *Ac3Settings) SetBitrate(v int64) *Ac3Settings {
2741	s.Bitrate = &v
2742	return s
2743}
2744
2745// SetBitstreamMode sets the BitstreamMode field's value.
2746func (s *Ac3Settings) SetBitstreamMode(v string) *Ac3Settings {
2747	s.BitstreamMode = &v
2748	return s
2749}
2750
2751// SetCodingMode sets the CodingMode field's value.
2752func (s *Ac3Settings) SetCodingMode(v string) *Ac3Settings {
2753	s.CodingMode = &v
2754	return s
2755}
2756
2757// SetDialnorm sets the Dialnorm field's value.
2758func (s *Ac3Settings) SetDialnorm(v int64) *Ac3Settings {
2759	s.Dialnorm = &v
2760	return s
2761}
2762
2763// SetDynamicRangeCompressionProfile sets the DynamicRangeCompressionProfile field's value.
2764func (s *Ac3Settings) SetDynamicRangeCompressionProfile(v string) *Ac3Settings {
2765	s.DynamicRangeCompressionProfile = &v
2766	return s
2767}
2768
2769// SetLfeFilter sets the LfeFilter field's value.
2770func (s *Ac3Settings) SetLfeFilter(v string) *Ac3Settings {
2771	s.LfeFilter = &v
2772	return s
2773}
2774
2775// SetMetadataControl sets the MetadataControl field's value.
2776func (s *Ac3Settings) SetMetadataControl(v string) *Ac3Settings {
2777	s.MetadataControl = &v
2778	return s
2779}
2780
2781// SetSampleRate sets the SampleRate field's value.
2782func (s *Ac3Settings) SetSampleRate(v int64) *Ac3Settings {
2783	s.SampleRate = &v
2784	return s
2785}
2786
2787// Accelerated transcoding can significantly speed up jobs with long, visually
2788// complex content.
2789type AccelerationSettings struct {
2790	_ struct{} `type:"structure"`
2791
2792	// Specify the conditions when the service will run your job with accelerated
2793	// transcoding.
2794	//
2795	// Mode is a required field
2796	Mode *string `locationName:"mode" type:"string" required:"true" enum:"AccelerationMode"`
2797}
2798
2799// String returns the string representation
2800func (s AccelerationSettings) String() string {
2801	return awsutil.Prettify(s)
2802}
2803
2804// GoString returns the string representation
2805func (s AccelerationSettings) GoString() string {
2806	return s.String()
2807}
2808
2809// Validate inspects the fields of the type to determine if they are valid.
2810func (s *AccelerationSettings) Validate() error {
2811	invalidParams := request.ErrInvalidParams{Context: "AccelerationSettings"}
2812	if s.Mode == nil {
2813		invalidParams.Add(request.NewErrParamRequired("Mode"))
2814	}
2815
2816	if invalidParams.Len() > 0 {
2817		return invalidParams
2818	}
2819	return nil
2820}
2821
2822// SetMode sets the Mode field's value.
2823func (s *AccelerationSettings) SetMode(v string) *AccelerationSettings {
2824	s.Mode = &v
2825	return s
2826}
2827
2828// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
2829// the value AIFF.
2830type AiffSettings struct {
2831	_ struct{} `type:"structure"`
2832
2833	// Specify Bit depth (BitDepth), in bits per sample, to choose the encoding
2834	// quality for this audio track.
2835	BitDepth *int64 `locationName:"bitDepth" min:"16" type:"integer"`
2836
2837	// Specify the number of channels in this output audio track. Valid values are
2838	// 1 and even numbers up to 64. For example, 1, 2, 4, 6, and so on, up to 64.
2839	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
2840
	// Sample rate in Hz.
2842	SampleRate *int64 `locationName:"sampleRate" min:"8000" type:"integer"`
2843}
2844
2845// String returns the string representation
2846func (s AiffSettings) String() string {
2847	return awsutil.Prettify(s)
2848}
2849
2850// GoString returns the string representation
2851func (s AiffSettings) GoString() string {
2852	return s.String()
2853}
2854
2855// Validate inspects the fields of the type to determine if they are valid.
2856func (s *AiffSettings) Validate() error {
2857	invalidParams := request.ErrInvalidParams{Context: "AiffSettings"}
2858	if s.BitDepth != nil && *s.BitDepth < 16 {
2859		invalidParams.Add(request.NewErrParamMinValue("BitDepth", 16))
2860	}
2861	if s.Channels != nil && *s.Channels < 1 {
2862		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
2863	}
2864	if s.SampleRate != nil && *s.SampleRate < 8000 {
2865		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 8000))
2866	}
2867
2868	if invalidParams.Len() > 0 {
2869		return invalidParams
2870	}
2871	return nil
2872}
2873
2874// SetBitDepth sets the BitDepth field's value.
2875func (s *AiffSettings) SetBitDepth(v int64) *AiffSettings {
2876	s.BitDepth = &v
2877	return s
2878}
2879
2880// SetChannels sets the Channels field's value.
2881func (s *AiffSettings) SetChannels(v int64) *AiffSettings {
2882	s.Channels = &v
2883	return s
2884}
2885
2886// SetSampleRate sets the SampleRate field's value.
2887func (s *AiffSettings) SetSampleRate(v int64) *AiffSettings {
2888	s.SampleRate = &v
2889	return s
2890}
2891
2892// Settings for ancillary captions source.
2893type AncillarySourceSettings struct {
2894	_ struct{} `type:"structure"`
2895
2896	// Specify whether this set of input captions appears in your outputs in both
2897	// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
2898	// the captions data in two ways: it passes the 608 data through using the 608
2899	// compatibility bytes fields of the 708 wrapper, and it also translates the
2900	// 608 data into 708.
2901	Convert608To708 *string `locationName:"convert608To708" type:"string" enum:"AncillaryConvert608To708"`
2902
2903	// Specifies the 608 channel number in the ancillary data track from which to
2904	// extract captions. Unused for passthrough.
2905	SourceAncillaryChannelNumber *int64 `locationName:"sourceAncillaryChannelNumber" min:"1" type:"integer"`
2906
2907	// By default, the service terminates any unterminated captions at the end of
2908	// each input. If you want the caption to continue onto your next input, disable
2909	// this setting.
2910	TerminateCaptions *string `locationName:"terminateCaptions" type:"string" enum:"AncillaryTerminateCaptions"`
2911}
2912
2913// String returns the string representation
2914func (s AncillarySourceSettings) String() string {
2915	return awsutil.Prettify(s)
2916}
2917
2918// GoString returns the string representation
2919func (s AncillarySourceSettings) GoString() string {
2920	return s.String()
2921}
2922
2923// Validate inspects the fields of the type to determine if they are valid.
2924func (s *AncillarySourceSettings) Validate() error {
2925	invalidParams := request.ErrInvalidParams{Context: "AncillarySourceSettings"}
2926	if s.SourceAncillaryChannelNumber != nil && *s.SourceAncillaryChannelNumber < 1 {
2927		invalidParams.Add(request.NewErrParamMinValue("SourceAncillaryChannelNumber", 1))
2928	}
2929
2930	if invalidParams.Len() > 0 {
2931		return invalidParams
2932	}
2933	return nil
2934}
2935
2936// SetConvert608To708 sets the Convert608To708 field's value.
2937func (s *AncillarySourceSettings) SetConvert608To708(v string) *AncillarySourceSettings {
2938	s.Convert608To708 = &v
2939	return s
2940}
2941
2942// SetSourceAncillaryChannelNumber sets the SourceAncillaryChannelNumber field's value.
2943func (s *AncillarySourceSettings) SetSourceAncillaryChannelNumber(v int64) *AncillarySourceSettings {
2944	s.SourceAncillaryChannelNumber = &v
2945	return s
2946}
2947
2948// SetTerminateCaptions sets the TerminateCaptions field's value.
2949func (s *AncillarySourceSettings) SetTerminateCaptions(v string) *AncillarySourceSettings {
2950	s.TerminateCaptions = &v
2951	return s
2952}
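
// Example: a sketch of an ancillary captions source that upconverts 608 data to
// 708, as described for Convert608To708 above. UPCONVERT is the enum value that
// documentation mentions.
//
//    ancillary := &AncillarySourceSettings{}
//    ancillary.SetConvert608To708("UPCONVERT").SetSourceAncillaryChannelNumber(1)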
2953
2954// Associates the Amazon Resource Name (ARN) of an AWS Certificate Manager (ACM)
2955// certificate with an AWS Elemental MediaConvert resource.
2956type AssociateCertificateInput struct {
2957	_ struct{} `type:"structure"`
2958
2959	// The ARN of the ACM certificate that you want to associate with your MediaConvert
2960	// resource.
2961	//
2962	// Arn is a required field
2963	Arn *string `locationName:"arn" type:"string" required:"true"`
2964}
2965
2966// String returns the string representation
2967func (s AssociateCertificateInput) String() string {
2968	return awsutil.Prettify(s)
2969}
2970
2971// GoString returns the string representation
2972func (s AssociateCertificateInput) GoString() string {
2973	return s.String()
2974}
2975
2976// Validate inspects the fields of the type to determine if they are valid.
2977func (s *AssociateCertificateInput) Validate() error {
2978	invalidParams := request.ErrInvalidParams{Context: "AssociateCertificateInput"}
2979	if s.Arn == nil {
2980		invalidParams.Add(request.NewErrParamRequired("Arn"))
2981	}
2982
2983	if invalidParams.Len() > 0 {
2984		return invalidParams
2985	}
2986	return nil
2987}
2988
2989// SetArn sets the Arn field's value.
2990func (s *AssociateCertificateInput) SetArn(v string) *AssociateCertificateInput {
2991	s.Arn = &v
2992	return s
2993}
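
// Example: a sketch of building the AssociateCertificate input with the setter
// above. The ARN shown is a placeholder, not a real certificate.
//
//    input := &AssociateCertificateInput{}
//    input.SetArn("arn:aws:acm:us-east-1:111122223333:certificate/example") // placeholder ARN
//    if err := input.Validate(); err != nil {
//        fmt.Println(err)
//    }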
2994
2995// Successful association of Certificate Manager Amazon Resource Name (ARN)
// with MediaConvert returns an OK message.
2997type AssociateCertificateOutput struct {
2998	_ struct{} `type:"structure"`
2999}
3000
3001// String returns the string representation
3002func (s AssociateCertificateOutput) String() string {
3003	return awsutil.Prettify(s)
3004}
3005
3006// GoString returns the string representation
3007func (s AssociateCertificateOutput) GoString() string {
3008	return s.String()
3009}
3010
3011// When you mimic a multi-channel audio layout with multiple mono-channel tracks,
3012// you can tag each channel layout manually. For example, you would tag the
3013// tracks that contain your left, right, and center audio with Left (L), Right
3014// (R), and Center (C), respectively. When you don't specify a value, MediaConvert
3015// labels your track as Center (C) by default. To use audio layout tagging,
3016// your output must be in a QuickTime (.mov) container; your audio codec must
3017// be AAC, WAV, or AIFF; and you must set up your audio track to have only one
3018// channel.
3019type AudioChannelTaggingSettings struct {
3020	_ struct{} `type:"structure"`
3021
3022	// You can add a tag for this mono-channel audio track to mimic its placement
3023	// in a multi-channel layout. For example, if this track is the left surround
3024	// channel, choose Left surround (LS).
3025	ChannelTag *string `locationName:"channelTag" type:"string" enum:"AudioChannelTag"`
3026}
3027
3028// String returns the string representation
3029func (s AudioChannelTaggingSettings) String() string {
3030	return awsutil.Prettify(s)
3031}
3032
3033// GoString returns the string representation
3034func (s AudioChannelTaggingSettings) GoString() string {
3035	return s.String()
3036}
3037
3038// SetChannelTag sets the ChannelTag field's value.
3039func (s *AudioChannelTaggingSettings) SetChannelTag(v string) *AudioChannelTaggingSettings {
3040	s.ChannelTag = &v
3041	return s
3042}
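
// Example: a sketch that tags a mono-channel track as the left surround channel,
// using the "LS" value mentioned in the ChannelTag documentation above.
//
//    tagging := &AudioChannelTaggingSettings{}
//    tagging.SetChannelTag("LS")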
3043
3044// Audio codec settings (CodecSettings) under (AudioDescriptions) contains the
3045// group of settings related to audio encoding. The settings in this group vary
3046// depending on the value that you choose for Audio codec (Codec). For each
3047// codec enum that you choose, define the corresponding settings object. The
3048// following lists the codec enum, settings object pairs. * AAC, AacSettings
3049// * MP2, Mp2Settings * MP3, Mp3Settings * WAV, WavSettings * AIFF, AiffSettings
3050// * AC3, Ac3Settings * EAC3, Eac3Settings * EAC3_ATMOS, Eac3AtmosSettings *
3051// VORBIS, VorbisSettings * OPUS, OpusSettings
3052type AudioCodecSettings struct {
3053	_ struct{} `type:"structure"`
3054
3055	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3056	// the value AAC. The service accepts one of two mutually exclusive groups of
3057	// AAC settings--VBR and CBR. To select one of these modes, set the value of
3058	// Bitrate control mode (rateControlMode) to "VBR" or "CBR". In VBR mode, you
3059	// control the audio quality with the setting VBR quality (vbrQuality). In CBR
3060	// mode, you use the setting Bitrate (bitrate). Defaults and valid values depend
3061	// on the rate control mode.
3062	AacSettings *AacSettings `locationName:"aacSettings" type:"structure"`
3063
3064	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3065	// the value AC3.
3066	Ac3Settings *Ac3Settings `locationName:"ac3Settings" type:"structure"`
3067
3068	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3069	// the value AIFF.
3070	AiffSettings *AiffSettings `locationName:"aiffSettings" type:"structure"`
3071
3072	// Type of Audio codec.
3073	Codec *string `locationName:"codec" type:"string" enum:"AudioCodec"`
3074
3075	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3076	// the value EAC3_ATMOS.
3077	Eac3AtmosSettings *Eac3AtmosSettings `locationName:"eac3AtmosSettings" type:"structure"`
3078
3079	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3080	// the value EAC3.
3081	Eac3Settings *Eac3Settings `locationName:"eac3Settings" type:"structure"`
3082
3083	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3084	// the value MP2.
3085	Mp2Settings *Mp2Settings `locationName:"mp2Settings" type:"structure"`
3086
3087	// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
3088	// value MP3.
3089	Mp3Settings *Mp3Settings `locationName:"mp3Settings" type:"structure"`
3090
3091	// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
3092	// value OPUS.
3093	OpusSettings *OpusSettings `locationName:"opusSettings" type:"structure"`
3094
3095	// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
3096	// value Vorbis.
3097	VorbisSettings *VorbisSettings `locationName:"vorbisSettings" type:"structure"`
3098
3099	// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
3100	// the value WAV.
3101	WavSettings *WavSettings `locationName:"wavSettings" type:"structure"`
3102}
3103
3104// String returns the string representation
3105func (s AudioCodecSettings) String() string {
3106	return awsutil.Prettify(s)
3107}
3108
3109// GoString returns the string representation
3110func (s AudioCodecSettings) GoString() string {
3111	return s.String()
3112}
3113
3114// Validate inspects the fields of the type to determine if they are valid.
3115func (s *AudioCodecSettings) Validate() error {
3116	invalidParams := request.ErrInvalidParams{Context: "AudioCodecSettings"}
3117	if s.AacSettings != nil {
3118		if err := s.AacSettings.Validate(); err != nil {
3119			invalidParams.AddNested("AacSettings", err.(request.ErrInvalidParams))
3120		}
3121	}
3122	if s.Ac3Settings != nil {
3123		if err := s.Ac3Settings.Validate(); err != nil {
3124			invalidParams.AddNested("Ac3Settings", err.(request.ErrInvalidParams))
3125		}
3126	}
3127	if s.AiffSettings != nil {
3128		if err := s.AiffSettings.Validate(); err != nil {
3129			invalidParams.AddNested("AiffSettings", err.(request.ErrInvalidParams))
3130		}
3131	}
3132	if s.Eac3AtmosSettings != nil {
3133		if err := s.Eac3AtmosSettings.Validate(); err != nil {
3134			invalidParams.AddNested("Eac3AtmosSettings", err.(request.ErrInvalidParams))
3135		}
3136	}
3137	if s.Eac3Settings != nil {
3138		if err := s.Eac3Settings.Validate(); err != nil {
3139			invalidParams.AddNested("Eac3Settings", err.(request.ErrInvalidParams))
3140		}
3141	}
3142	if s.Mp2Settings != nil {
3143		if err := s.Mp2Settings.Validate(); err != nil {
3144			invalidParams.AddNested("Mp2Settings", err.(request.ErrInvalidParams))
3145		}
3146	}
3147	if s.Mp3Settings != nil {
3148		if err := s.Mp3Settings.Validate(); err != nil {
3149			invalidParams.AddNested("Mp3Settings", err.(request.ErrInvalidParams))
3150		}
3151	}
3152	if s.OpusSettings != nil {
3153		if err := s.OpusSettings.Validate(); err != nil {
3154			invalidParams.AddNested("OpusSettings", err.(request.ErrInvalidParams))
3155		}
3156	}
3157	if s.VorbisSettings != nil {
3158		if err := s.VorbisSettings.Validate(); err != nil {
3159			invalidParams.AddNested("VorbisSettings", err.(request.ErrInvalidParams))
3160		}
3161	}
3162	if s.WavSettings != nil {
3163		if err := s.WavSettings.Validate(); err != nil {
3164			invalidParams.AddNested("WavSettings", err.(request.ErrInvalidParams))
3165		}
3166	}
3167
3168	if invalidParams.Len() > 0 {
3169		return invalidParams
3170	}
3171	return nil
3172}
3173
3174// SetAacSettings sets the AacSettings field's value.
3175func (s *AudioCodecSettings) SetAacSettings(v *AacSettings) *AudioCodecSettings {
3176	s.AacSettings = v
3177	return s
3178}
3179
3180// SetAc3Settings sets the Ac3Settings field's value.
3181func (s *AudioCodecSettings) SetAc3Settings(v *Ac3Settings) *AudioCodecSettings {
3182	s.Ac3Settings = v
3183	return s
3184}
3185
3186// SetAiffSettings sets the AiffSettings field's value.
3187func (s *AudioCodecSettings) SetAiffSettings(v *AiffSettings) *AudioCodecSettings {
3188	s.AiffSettings = v
3189	return s
3190}
3191
3192// SetCodec sets the Codec field's value.
3193func (s *AudioCodecSettings) SetCodec(v string) *AudioCodecSettings {
3194	s.Codec = &v
3195	return s
3196}
3197
3198// SetEac3AtmosSettings sets the Eac3AtmosSettings field's value.
3199func (s *AudioCodecSettings) SetEac3AtmosSettings(v *Eac3AtmosSettings) *AudioCodecSettings {
3200	s.Eac3AtmosSettings = v
3201	return s
3202}
3203
3204// SetEac3Settings sets the Eac3Settings field's value.
3205func (s *AudioCodecSettings) SetEac3Settings(v *Eac3Settings) *AudioCodecSettings {
3206	s.Eac3Settings = v
3207	return s
3208}
3209
3210// SetMp2Settings sets the Mp2Settings field's value.
3211func (s *AudioCodecSettings) SetMp2Settings(v *Mp2Settings) *AudioCodecSettings {
3212	s.Mp2Settings = v
3213	return s
3214}
3215
3216// SetMp3Settings sets the Mp3Settings field's value.
3217func (s *AudioCodecSettings) SetMp3Settings(v *Mp3Settings) *AudioCodecSettings {
3218	s.Mp3Settings = v
3219	return s
3220}
3221
3222// SetOpusSettings sets the OpusSettings field's value.
3223func (s *AudioCodecSettings) SetOpusSettings(v *OpusSettings) *AudioCodecSettings {
3224	s.OpusSettings = v
3225	return s
3226}
3227
3228// SetVorbisSettings sets the VorbisSettings field's value.
3229func (s *AudioCodecSettings) SetVorbisSettings(v *VorbisSettings) *AudioCodecSettings {
3230	s.VorbisSettings = v
3231	return s
3232}
3233
3234// SetWavSettings sets the WavSettings field's value.
3235func (s *AudioCodecSettings) SetWavSettings(v *WavSettings) *AudioCodecSettings {
3236	s.WavSettings = v
3237	return s
3238}
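
// Example: a sketch that pairs a codec enum with its settings object, following
// the codec/settings pairs listed above. "AIFF" is the assumed AudioCodec enum
// value for the AIFF pair.
//
//    codec := &AudioCodecSettings{}
//    codec.SetCodec("AIFF") // assumed AudioCodec value
//    codec.SetAiffSettings((&AiffSettings{}).SetBitDepth(16).SetChannels(2).SetSampleRate(48000))
//    if err := codec.Validate(); err != nil {
//        fmt.Println(err)
//    }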
3239
3240// Description of audio output
3241type AudioDescription struct {
3242	_ struct{} `type:"structure"`
3243
3244	// When you mimic a multi-channel audio layout with multiple mono-channel tracks,
3245	// you can tag each channel layout manually. For example, you would tag the
3246	// tracks that contain your left, right, and center audio with Left (L), Right
3247	// (R), and Center (C), respectively. When you don't specify a value, MediaConvert
3248	// labels your track as Center (C) by default. To use audio layout tagging,
3249	// your output must be in a QuickTime (.mov) container; your audio codec must
3250	// be AAC, WAV, or AIFF; and you must set up your audio track to have only one
3251	// channel.
3252	AudioChannelTaggingSettings *AudioChannelTaggingSettings `locationName:"audioChannelTaggingSettings" type:"structure"`
3253
3254	// Advanced audio normalization settings. Ignore these settings unless you need
3255	// to comply with a loudness standard.
3256	AudioNormalizationSettings *AudioNormalizationSettings `locationName:"audioNormalizationSettings" type:"structure"`
3257
	// Specifies which audio data to use from each input. In the simplest case,
	// specify an "Audio Selector" by name based on its order within each input.
	// For example, if you specify "Audio Selector 3", then the third audio selector
	// will be used from each input. If an input does not have an "Audio Selector
	// 3", then the audio selector marked as "default" in that input will be used.
	// If there is no audio selector marked as "default", silence will be inserted
	// for the duration of that input. Alternatively, an "Audio Selector Group" name
	// may be specified, with similar default/silence behavior. If no audio_source_name
	// is specified, then "Audio Selector 1" will be chosen automatically.
3268	AudioSourceName *string `locationName:"audioSourceName" type:"string"`
3269
3270	// Applies only if Follow Input Audio Type is unchecked (false). A number between
3271	// 0 and 255. The following are defined in ISO-IEC 13818-1: 0 = Undefined, 1
3272	// = Clean Effects, 2 = Hearing Impaired, 3 = Visually Impaired Commentary,
3273	// 4-255 = Reserved.
3274	AudioType *int64 `locationName:"audioType" type:"integer"`
3275
	// When set to FOLLOW_INPUT, if the input contains an ISO 639 audio_type, then
	// that value is passed through to the output. If the input contains no ISO
	// 639 audio_type, the value in Audio Type is included in the output instead.
	// When this is not set to FOLLOW_INPUT, the value in Audio Type is always
	// included in the output. Note that this field and audioType are both ignored
	// if audioDescriptionBroadcasterMix is set to BROADCASTER_MIXED_AD.
3281	AudioTypeControl *string `locationName:"audioTypeControl" type:"string" enum:"AudioTypeControl"`
3282
3283	// Audio codec settings (CodecSettings) under (AudioDescriptions) contains the
3284	// group of settings related to audio encoding. The settings in this group vary
3285	// depending on the value that you choose for Audio codec (Codec). For each
3286	// codec enum that you choose, define the corresponding settings object. The
3287	// following lists the codec enum, settings object pairs. * AAC, AacSettings
3288	// * MP2, Mp2Settings * MP3, Mp3Settings * WAV, WavSettings * AIFF, AiffSettings
3289	// * AC3, Ac3Settings * EAC3, Eac3Settings * EAC3_ATMOS, Eac3AtmosSettings *
3290	// VORBIS, VorbisSettings * OPUS, OpusSettings
3291	CodecSettings *AudioCodecSettings `locationName:"codecSettings" type:"structure"`
3292
3293	// Specify the language for this audio output track. The service puts this language
3294	// code into your output audio track when you set Language code control (AudioLanguageCodeControl)
3295	// to Use configured (USE_CONFIGURED). The service also uses your specified
3296	// custom language code when you set Language code control (AudioLanguageCodeControl)
3297	// to Follow input (FOLLOW_INPUT), but your input file doesn't specify a language
3298	// code. For all outputs, you can use an ISO 639-2 or ISO 639-3 code. For streaming
3299	// outputs, you can also use any other code in the full RFC-5646 specification.
3300	// Streaming outputs are those that are in one of the following output groups:
3301	// CMAF, DASH ISO, Apple HLS, or Microsoft Smooth Streaming.
3302	CustomLanguageCode *string `locationName:"customLanguageCode" type:"string"`
3303
3304	// Indicates the language of the audio output track. The ISO 639 language specified
3305	// in the 'Language Code' drop down will be used when 'Follow Input Language
3306	// Code' is not selected or when 'Follow Input Language Code' is selected but
3307	// there is no ISO 639 language code specified by the input.
3308	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
3309
3310	// Specify which source for language code takes precedence for this audio track.
3311	// When you choose Follow input (FOLLOW_INPUT), the service uses the language
	// code from the input track if it's present. If there's no language code on
3313	// the input track, the service uses the code that you specify in the setting
3314	// Language code (languageCode or customLanguageCode). When you choose Use configured
3315	// (USE_CONFIGURED), the service uses the language code that you specify.
3316	LanguageCodeControl *string `locationName:"languageCodeControl" type:"string" enum:"AudioLanguageCodeControl"`
3317
3318	// Advanced audio remixing settings.
3319	RemixSettings *RemixSettings `locationName:"remixSettings" type:"structure"`
3320
3321	// Specify a label for this output audio stream. For example, "English", "Director
3322	// commentary", or "track_2". For streaming outputs, MediaConvert passes this
3323	// information into destination manifests for display on the end-viewer's player
3324	// device. For outputs in other output groups, the service ignores this setting.
3325	StreamName *string `locationName:"streamName" type:"string"`
3326}
3327
3328// String returns the string representation
3329func (s AudioDescription) String() string {
3330	return awsutil.Prettify(s)
3331}
3332
3333// GoString returns the string representation
3334func (s AudioDescription) GoString() string {
3335	return s.String()
3336}
3337
3338// Validate inspects the fields of the type to determine if they are valid.
3339func (s *AudioDescription) Validate() error {
3340	invalidParams := request.ErrInvalidParams{Context: "AudioDescription"}
3341	if s.AudioNormalizationSettings != nil {
3342		if err := s.AudioNormalizationSettings.Validate(); err != nil {
3343			invalidParams.AddNested("AudioNormalizationSettings", err.(request.ErrInvalidParams))
3344		}
3345	}
3346	if s.CodecSettings != nil {
3347		if err := s.CodecSettings.Validate(); err != nil {
3348			invalidParams.AddNested("CodecSettings", err.(request.ErrInvalidParams))
3349		}
3350	}
3351	if s.RemixSettings != nil {
3352		if err := s.RemixSettings.Validate(); err != nil {
3353			invalidParams.AddNested("RemixSettings", err.(request.ErrInvalidParams))
3354		}
3355	}
3356
3357	if invalidParams.Len() > 0 {
3358		return invalidParams
3359	}
3360	return nil
3361}
3362
3363// SetAudioChannelTaggingSettings sets the AudioChannelTaggingSettings field's value.
3364func (s *AudioDescription) SetAudioChannelTaggingSettings(v *AudioChannelTaggingSettings) *AudioDescription {
3365	s.AudioChannelTaggingSettings = v
3366	return s
3367}
3368
3369// SetAudioNormalizationSettings sets the AudioNormalizationSettings field's value.
3370func (s *AudioDescription) SetAudioNormalizationSettings(v *AudioNormalizationSettings) *AudioDescription {
3371	s.AudioNormalizationSettings = v
3372	return s
3373}
3374
3375// SetAudioSourceName sets the AudioSourceName field's value.
3376func (s *AudioDescription) SetAudioSourceName(v string) *AudioDescription {
3377	s.AudioSourceName = &v
3378	return s
3379}
3380
3381// SetAudioType sets the AudioType field's value.
3382func (s *AudioDescription) SetAudioType(v int64) *AudioDescription {
3383	s.AudioType = &v
3384	return s
3385}
3386
3387// SetAudioTypeControl sets the AudioTypeControl field's value.
3388func (s *AudioDescription) SetAudioTypeControl(v string) *AudioDescription {
3389	s.AudioTypeControl = &v
3390	return s
3391}
3392
3393// SetCodecSettings sets the CodecSettings field's value.
3394func (s *AudioDescription) SetCodecSettings(v *AudioCodecSettings) *AudioDescription {
3395	s.CodecSettings = v
3396	return s
3397}
3398
3399// SetCustomLanguageCode sets the CustomLanguageCode field's value.
3400func (s *AudioDescription) SetCustomLanguageCode(v string) *AudioDescription {
3401	s.CustomLanguageCode = &v
3402	return s
3403}
3404
3405// SetLanguageCode sets the LanguageCode field's value.
3406func (s *AudioDescription) SetLanguageCode(v string) *AudioDescription {
3407	s.LanguageCode = &v
3408	return s
3409}
3410
3411// SetLanguageCodeControl sets the LanguageCodeControl field's value.
3412func (s *AudioDescription) SetLanguageCodeControl(v string) *AudioDescription {
3413	s.LanguageCodeControl = &v
3414	return s
3415}
3416
3417// SetRemixSettings sets the RemixSettings field's value.
3418func (s *AudioDescription) SetRemixSettings(v *RemixSettings) *AudioDescription {
3419	s.RemixSettings = v
3420	return s
3421}
3422
3423// SetStreamName sets the StreamName field's value.
3424func (s *AudioDescription) SetStreamName(v string) *AudioDescription {
3425	s.StreamName = &v
3426	return s
3427}
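
// Example: a sketch of a single output audio description that reads from
// "Audio Selector 1", encodes AIFF, and labels the stream, using the setters
// above. The nested codec settings repeat the AudioCodecSettings sketch.
//
//    desc := &AudioDescription{}
//    desc.SetAudioSourceName("Audio Selector 1")
//    desc.SetStreamName("English")
//    desc.SetCodecSettings((&AudioCodecSettings{}).
//        SetCodec("AIFF").
//        SetAiffSettings((&AiffSettings{}).SetBitDepth(16).SetChannels(2).SetSampleRate(48000)))
//    if err := desc.Validate(); err != nil {
//        fmt.Println(err)
//    }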
3428
3429// Advanced audio normalization settings. Ignore these settings unless you need
3430// to comply with a loudness standard.
3431type AudioNormalizationSettings struct {
3432	_ struct{} `type:"structure"`
3433
3434	// Choose one of the following audio normalization algorithms: ITU-R BS.1770-1:
3435	// Ungated loudness. A measurement of ungated average loudness for an entire
3436	// piece of content, suitable for measurement of short-form content under ATSC
3437	// recommendation A/85. Supports up to 5.1 audio channels. ITU-R BS.1770-2:
3438	// Gated loudness. A measurement of gated average loudness compliant with the
3439	// requirements of EBU-R128. Supports up to 5.1 audio channels. ITU-R BS.1770-3:
3440	// Modified peak. The same loudness measurement algorithm as 1770-2, with an
3441	// updated true peak measurement. ITU-R BS.1770-4: Higher channel count. Allows
3442	// for more audio channels than the other algorithms, including configurations
3443	// such as 7.1.
3444	Algorithm *string `locationName:"algorithm" type:"string" enum:"AudioNormalizationAlgorithm"`
3445
	// When enabled, the output audio is corrected using the chosen algorithm. If
3447	// disabled, the audio will be measured but not adjusted.
3448	AlgorithmControl *string `locationName:"algorithmControl" type:"string" enum:"AudioNormalizationAlgorithmControl"`
3449
3450	// Content measuring above this level will be corrected to the target level.
3451	// Content measuring below this level will not be corrected.
3452	CorrectionGateLevel *int64 `locationName:"correctionGateLevel" type:"integer"`
3453
3454	// If set to LOG, log each output's audio track loudness to a CSV file.
3455	LoudnessLogging *string `locationName:"loudnessLogging" type:"string" enum:"AudioNormalizationLoudnessLogging"`
3456
3457	// If set to TRUE_PEAK, calculate and log the TruePeak for each output's audio
3458	// track loudness.
3459	PeakCalculation *string `locationName:"peakCalculation" type:"string" enum:"AudioNormalizationPeakCalculation"`
3460
3461	// When you use Audio normalization (AudioNormalizationSettings), optionally
3462	// use this setting to specify a target loudness. If you don't specify a value
3463	// here, the encoder chooses a value for you, based on the algorithm that you
3464	// choose for Algorithm (algorithm). If you choose algorithm 1770-1, the encoder
3465	// will choose -24 LKFS; otherwise, the encoder will choose -23 LKFS.
3466	TargetLkfs *float64 `locationName:"targetLkfs" type:"double"`
3467}
3468
3469// String returns the string representation
3470func (s AudioNormalizationSettings) String() string {
3471	return awsutil.Prettify(s)
3472}
3473
3474// GoString returns the string representation
3475func (s AudioNormalizationSettings) GoString() string {
3476	return s.String()
3477}
3478
3479// Validate inspects the fields of the type to determine if they are valid.
3480func (s *AudioNormalizationSettings) Validate() error {
3481	invalidParams := request.ErrInvalidParams{Context: "AudioNormalizationSettings"}
3482	if s.CorrectionGateLevel != nil && *s.CorrectionGateLevel < -70 {
3483		invalidParams.Add(request.NewErrParamMinValue("CorrectionGateLevel", -70))
3484	}
3485
3486	if invalidParams.Len() > 0 {
3487		return invalidParams
3488	}
3489	return nil
3490}
3491
3492// SetAlgorithm sets the Algorithm field's value.
3493func (s *AudioNormalizationSettings) SetAlgorithm(v string) *AudioNormalizationSettings {
3494	s.Algorithm = &v
3495	return s
3496}
3497
3498// SetAlgorithmControl sets the AlgorithmControl field's value.
3499func (s *AudioNormalizationSettings) SetAlgorithmControl(v string) *AudioNormalizationSettings {
3500	s.AlgorithmControl = &v
3501	return s
3502}
3503
3504// SetCorrectionGateLevel sets the CorrectionGateLevel field's value.
3505func (s *AudioNormalizationSettings) SetCorrectionGateLevel(v int64) *AudioNormalizationSettings {
3506	s.CorrectionGateLevel = &v
3507	return s
3508}
3509
3510// SetLoudnessLogging sets the LoudnessLogging field's value.
3511func (s *AudioNormalizationSettings) SetLoudnessLogging(v string) *AudioNormalizationSettings {
3512	s.LoudnessLogging = &v
3513	return s
3514}
3515
3516// SetPeakCalculation sets the PeakCalculation field's value.
3517func (s *AudioNormalizationSettings) SetPeakCalculation(v string) *AudioNormalizationSettings {
3518	s.PeakCalculation = &v
3519	return s
3520}
3521
3522// SetTargetLkfs sets the TargetLkfs field's value.
3523func (s *AudioNormalizationSettings) SetTargetLkfs(v float64) *AudioNormalizationSettings {
3524	s.TargetLkfs = &v
3525	return s
3526}
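
// Example: a sketch of loudness normalization targeting -23 LKFS, the default
// that the TargetLkfs documentation above describes for the non-1770-1
// algorithms. "ITU_BS_1770_2" is an assumed AudioNormalizationAlgorithm enum
// value.
//
//    norm := &AudioNormalizationSettings{}
//    norm.SetAlgorithm("ITU_BS_1770_2") // assumed enum value
//    norm.SetTargetLkfs(-23.0)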
3527
3528// Selector for Audio
3529type AudioSelector struct {
3530	_ struct{} `type:"structure"`
3531
3532	// Selects a specific language code from within an audio source, using the ISO
3533	// 639-2 or ISO 639-3 three-letter language code
3534	CustomLanguageCode *string `locationName:"customLanguageCode" min:"3" type:"string"`
3535
3536	// Enable this setting on one audio selector to set it as the default for the
3537	// job. The service uses this default for outputs where it can't find the specified
3538	// input audio. If you don't set a default, those outputs have no audio.
3539	DefaultSelection *string `locationName:"defaultSelection" type:"string" enum:"AudioDefaultSelection"`
3540
3541	// Specifies audio data from an external file source.
3542	ExternalAudioFileInput *string `locationName:"externalAudioFileInput" type:"string"`
3543
3544	// Selects a specific language code from within an audio source.
3545	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
3546
3547	// Specifies a time delta in milliseconds to offset the audio from the input
3548	// video.
3549	Offset *int64 `locationName:"offset" type:"integer"`
3550
3551	// Selects a specific PID from within an audio source (e.g. 257 selects PID
3552	// 0x101).
3553	Pids []*int64 `locationName:"pids" type:"list"`
3554
3555	// Use this setting for input streams that contain Dolby E, to have the service
3556	// extract specific program data from the track. To select multiple programs,
3557	// create multiple selectors with the same Track and different Program numbers.
3558	// In the console, this setting is visible when you set Selector type to Track.
3559	// Choose the program number from the dropdown list. If you are sending a JSON
3560	// file, provide the program ID, which is part of the audio metadata. If your
3561	// input file has incorrect metadata, you can choose All channels instead of
3562	// a program number to have the service ignore the program IDs and include all
3563	// the programs in the track.
3564	ProgramSelection *int64 `locationName:"programSelection" type:"integer"`
3565
3566	// Use these settings to reorder the audio channels of one input to match those
3567	// of another input. This allows you to combine the two files into a single
3568	// output, one after the other.
3569	RemixSettings *RemixSettings `locationName:"remixSettings" type:"structure"`
3570
3571	// Specifies the type of the audio selector.
3572	SelectorType *string `locationName:"selectorType" type:"string" enum:"AudioSelectorType"`
3573
3574	// Identify a track from the input audio to include in this selector by entering
3575	// the track index number. To include several tracks in a single audio selector,
3576	// specify multiple tracks as follows. Using the console, enter a comma-separated
	// list. For example, type "1,2,3" to include tracks 1 through 3. If you specify
	// tracks directly in your JSON job file, provide the track numbers in an array.
	// For example, "tracks": [1,2,3].
3580	Tracks []*int64 `locationName:"tracks" type:"list"`
3581}
3582
3583// String returns the string representation
3584func (s AudioSelector) String() string {
3585	return awsutil.Prettify(s)
3586}
3587
3588// GoString returns the string representation
3589func (s AudioSelector) GoString() string {
3590	return s.String()
3591}
3592
3593// Validate inspects the fields of the type to determine if they are valid.
3594func (s *AudioSelector) Validate() error {
3595	invalidParams := request.ErrInvalidParams{Context: "AudioSelector"}
3596	if s.CustomLanguageCode != nil && len(*s.CustomLanguageCode) < 3 {
3597		invalidParams.Add(request.NewErrParamMinLen("CustomLanguageCode", 3))
3598	}
3599	if s.Offset != nil && *s.Offset < -2.147483648e+09 {
3600		invalidParams.Add(request.NewErrParamMinValue("Offset", -2.147483648e+09))
3601	}
3602	if s.RemixSettings != nil {
3603		if err := s.RemixSettings.Validate(); err != nil {
3604			invalidParams.AddNested("RemixSettings", err.(request.ErrInvalidParams))
3605		}
3606	}
3607
3608	if invalidParams.Len() > 0 {
3609		return invalidParams
3610	}
3611	return nil
3612}
3613
3614// SetCustomLanguageCode sets the CustomLanguageCode field's value.
3615func (s *AudioSelector) SetCustomLanguageCode(v string) *AudioSelector {
3616	s.CustomLanguageCode = &v
3617	return s
3618}
3619
3620// SetDefaultSelection sets the DefaultSelection field's value.
3621func (s *AudioSelector) SetDefaultSelection(v string) *AudioSelector {
3622	s.DefaultSelection = &v
3623	return s
3624}
3625
3626// SetExternalAudioFileInput sets the ExternalAudioFileInput field's value.
3627func (s *AudioSelector) SetExternalAudioFileInput(v string) *AudioSelector {
3628	s.ExternalAudioFileInput = &v
3629	return s
3630}
3631
3632// SetLanguageCode sets the LanguageCode field's value.
3633func (s *AudioSelector) SetLanguageCode(v string) *AudioSelector {
3634	s.LanguageCode = &v
3635	return s
3636}
3637
3638// SetOffset sets the Offset field's value.
3639func (s *AudioSelector) SetOffset(v int64) *AudioSelector {
3640	s.Offset = &v
3641	return s
3642}
3643
3644// SetPids sets the Pids field's value.
3645func (s *AudioSelector) SetPids(v []*int64) *AudioSelector {
3646	s.Pids = v
3647	return s
3648}
3649
3650// SetProgramSelection sets the ProgramSelection field's value.
3651func (s *AudioSelector) SetProgramSelection(v int64) *AudioSelector {
3652	s.ProgramSelection = &v
3653	return s
3654}
3655
3656// SetRemixSettings sets the RemixSettings field's value.
3657func (s *AudioSelector) SetRemixSettings(v *RemixSettings) *AudioSelector {
3658	s.RemixSettings = v
3659	return s
3660}
3661
3662// SetSelectorType sets the SelectorType field's value.
3663func (s *AudioSelector) SetSelectorType(v string) *AudioSelector {
3664	s.SelectorType = &v
3665	return s
3666}
3667
3668// SetTracks sets the Tracks field's value.
3669func (s *AudioSelector) SetTracks(v []*int64) *AudioSelector {
3670	s.Tracks = v
3671	return s
3672}
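
// Example: a sketch of an audio selector that pulls tracks 1 through 3 by track
// number, as described in the Tracks documentation above. "TRACK" is an assumed
// AudioSelectorType enum value; aws.Int64 comes from the aws package imported by
// this file.
//
//    selector := &AudioSelector{}
//    selector.SetSelectorType("TRACK") // assumed AudioSelectorType value
//    selector.SetTracks([]*int64{aws.Int64(1), aws.Int64(2), aws.Int64(3)})
//    if err := selector.Validate(); err != nil {
//        fmt.Println(err)
//    }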
3673
3674// Group of Audio Selectors
3675type AudioSelectorGroup struct {
3676	_ struct{} `type:"structure"`
3677
3678	// Name of an Audio Selector within the same input to include in the group.
3679	// Audio selector names are standardized, based on their order within the input
3680	// (e.g., "Audio Selector 1"). The audio selector name parameter can be repeated
3681	// to add any number of audio selectors to the group.
3682	AudioSelectorNames []*string `locationName:"audioSelectorNames" type:"list"`
3683}
3684
3685// String returns the string representation
3686func (s AudioSelectorGroup) String() string {
3687	return awsutil.Prettify(s)
3688}
3689
3690// GoString returns the string representation
3691func (s AudioSelectorGroup) GoString() string {
3692	return s.String()
3693}
3694
3695// SetAudioSelectorNames sets the AudioSelectorNames field's value.
3696func (s *AudioSelectorGroup) SetAudioSelectorNames(v []*string) *AudioSelectorGroup {
3697	s.AudioSelectorNames = v
3698	return s
3699}
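
// Example: a sketch that groups two standardized audio selector names, per the
// AudioSelectorNames documentation above.
//
//    group := &AudioSelectorGroup{}
//    group.SetAudioSelectorNames([]*string{
//        aws.String("Audio Selector 1"),
//        aws.String("Audio Selector 2"),
//    })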
3700
3701// Use automated ABR to have MediaConvert set up the renditions in your ABR
3702// package for you automatically, based on characteristics of your input video.
3703// This feature optimizes video quality while minimizing the overall size of
3704// your ABR package.
3705type AutomatedAbrSettings struct {
3706	_ struct{} `type:"structure"`
3707
3708	// Optional. The maximum target bit rate used in your automated ABR stack. Use
3709	// this value to set an upper limit on the bandwidth consumed by the highest-quality
3710	// rendition. This is the rendition that is delivered to viewers with the fastest
3711	// internet connections. If you don't specify a value, MediaConvert uses 8,000,000
	// (8 Mb/s) by default.
3713	MaxAbrBitrate *int64 `locationName:"maxAbrBitrate" min:"100000" type:"integer"`
3714
3715	// Optional. The maximum number of renditions that MediaConvert will create
3716	// in your automated ABR stack. The number of renditions is determined automatically,
3717	// based on analysis of each job, but will never exceed this limit. When you
3718	// set this to Auto in the console, which is equivalent to excluding it from
3719	// your JSON job specification, MediaConvert defaults to a limit of 15.
3720	MaxRenditions *int64 `locationName:"maxRenditions" min:"3" type:"integer"`
3721
3722	// Optional. The minimum target bitrate used in your automated ABR stack. Use
3723	// this value to set a lower limit on the bitrate of video delivered to viewers
3724	// with slow internet connections. If you don't specify a value, MediaConvert
3725	// uses 600,000 (600 kb/s) by default.
3726	MinAbrBitrate *int64 `locationName:"minAbrBitrate" min:"100000" type:"integer"`
3727}
3728
3729// String returns the string representation
3730func (s AutomatedAbrSettings) String() string {
3731	return awsutil.Prettify(s)
3732}
3733
3734// GoString returns the string representation
3735func (s AutomatedAbrSettings) GoString() string {
3736	return s.String()
3737}
3738
3739// Validate inspects the fields of the type to determine if they are valid.
3740func (s *AutomatedAbrSettings) Validate() error {
3741	invalidParams := request.ErrInvalidParams{Context: "AutomatedAbrSettings"}
3742	if s.MaxAbrBitrate != nil && *s.MaxAbrBitrate < 100000 {
3743		invalidParams.Add(request.NewErrParamMinValue("MaxAbrBitrate", 100000))
3744	}
3745	if s.MaxRenditions != nil && *s.MaxRenditions < 3 {
3746		invalidParams.Add(request.NewErrParamMinValue("MaxRenditions", 3))
3747	}
3748	if s.MinAbrBitrate != nil && *s.MinAbrBitrate < 100000 {
3749		invalidParams.Add(request.NewErrParamMinValue("MinAbrBitrate", 100000))
3750	}
3751
3752	if invalidParams.Len() > 0 {
3753		return invalidParams
3754	}
3755	return nil
3756}
3757
3758// SetMaxAbrBitrate sets the MaxAbrBitrate field's value.
3759func (s *AutomatedAbrSettings) SetMaxAbrBitrate(v int64) *AutomatedAbrSettings {
3760	s.MaxAbrBitrate = &v
3761	return s
3762}
3763
3764// SetMaxRenditions sets the MaxRenditions field's value.
3765func (s *AutomatedAbrSettings) SetMaxRenditions(v int64) *AutomatedAbrSettings {
3766	s.MaxRenditions = &v
3767	return s
3768}
3769
3770// SetMinAbrBitrate sets the MinAbrBitrate field's value.
3771func (s *AutomatedAbrSettings) SetMinAbrBitrate(v int64) *AutomatedAbrSettings {
3772	s.MinAbrBitrate = &v
3773	return s
3774}
3775
3776// Use automated encoding to have MediaConvert choose your encoding settings
3777// for you, based on characteristics of your input video.
3778type AutomatedEncodingSettings struct {
3779	_ struct{} `type:"structure"`
3780
3781	// Use automated ABR to have MediaConvert set up the renditions in your ABR
3782	// package for you automatically, based on characteristics of your input video.
3783	// This feature optimizes video quality while minimizing the overall size of
3784	// your ABR package.
3785	AbrSettings *AutomatedAbrSettings `locationName:"abrSettings" type:"structure"`
3786}
3787
3788// String returns the string representation
3789func (s AutomatedEncodingSettings) String() string {
3790	return awsutil.Prettify(s)
3791}
3792
3793// GoString returns the string representation
3794func (s AutomatedEncodingSettings) GoString() string {
3795	return s.String()
3796}
3797
3798// Validate inspects the fields of the type to determine if they are valid.
3799func (s *AutomatedEncodingSettings) Validate() error {
3800	invalidParams := request.ErrInvalidParams{Context: "AutomatedEncodingSettings"}
3801	if s.AbrSettings != nil {
3802		if err := s.AbrSettings.Validate(); err != nil {
3803			invalidParams.AddNested("AbrSettings", err.(request.ErrInvalidParams))
3804		}
3805	}
3806
3807	if invalidParams.Len() > 0 {
3808		return invalidParams
3809	}
3810	return nil
3811}
3812
3813// SetAbrSettings sets the AbrSettings field's value.
3814func (s *AutomatedEncodingSettings) SetAbrSettings(v *AutomatedAbrSettings) *AutomatedEncodingSettings {
3815	s.AbrSettings = v
3816	return s
3817}
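
// Example: a sketch of automated ABR limits wrapped in automated encoding
// settings, using the documented defaults of 8,000,000 bps (maximum) and
// 600,000 bps (minimum) as explicit values.
//
//    abr := &AutomatedAbrSettings{}
//    abr.SetMaxAbrBitrate(8000000).SetMinAbrBitrate(600000).SetMaxRenditions(15)
//    auto := (&AutomatedEncodingSettings{}).SetAbrSettings(abr)
//    if err := auto.Validate(); err != nil {
//        fmt.Println(err)
//    }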
3818
3819// Settings for quality-defined variable bitrate encoding with the AV1 codec.
3820// Required when you set Rate control mode to QVBR. Not valid when you set Rate
3821// control mode to a value other than QVBR, or when you don't define Rate control
3822// mode.
3823type Av1QvbrSettings struct {
3824	_ struct{} `type:"structure"`
3825
3826	// Required when you use QVBR rate control mode. That is, when you specify qvbrSettings
3827	// within av1Settings. Specify the general target quality level for this output,
3828	// from 1 to 10. Use higher numbers for greater quality. Level 10 results in
3829	// nearly lossless compression. The quality level for most broadcast-quality
3830	// transcodes is between 6 and 9. Optionally, to specify a value between whole
3831	// numbers, also provide a value for the setting qvbrQualityLevelFineTune. For
3832	// example, if you want your QVBR quality level to be 7.33, set qvbrQualityLevel
3833	// to 7 and set qvbrQualityLevelFineTune to .33.
3834	QvbrQualityLevel *int64 `locationName:"qvbrQualityLevel" min:"1" type:"integer"`
3835
3836	// Optional. Specify a value here to set the QVBR quality to a level that is
3837	// between whole numbers. For example, if you want your QVBR quality level to
3838	// be 7.33, set qvbrQualityLevel to 7 and set qvbrQualityLevelFineTune to .33.
3839	// MediaConvert rounds your QVBR quality level to the nearest third of a whole
3840	// number. For example, if you set qvbrQualityLevel to 7 and you set qvbrQualityLevelFineTune
3841	// to .25, your actual QVBR quality level is 7.33.
3842	QvbrQualityLevelFineTune *float64 `locationName:"qvbrQualityLevelFineTune" type:"double"`
3843}
3844
3845// String returns the string representation
3846func (s Av1QvbrSettings) String() string {
3847	return awsutil.Prettify(s)
3848}
3849
3850// GoString returns the string representation
3851func (s Av1QvbrSettings) GoString() string {
3852	return s.String()
3853}
3854
3855// Validate inspects the fields of the type to determine if they are valid.
3856func (s *Av1QvbrSettings) Validate() error {
3857	invalidParams := request.ErrInvalidParams{Context: "Av1QvbrSettings"}
3858	if s.QvbrQualityLevel != nil && *s.QvbrQualityLevel < 1 {
3859		invalidParams.Add(request.NewErrParamMinValue("QvbrQualityLevel", 1))
3860	}
3861
3862	if invalidParams.Len() > 0 {
3863		return invalidParams
3864	}
3865	return nil
3866}
3867
3868// SetQvbrQualityLevel sets the QvbrQualityLevel field's value.
3869func (s *Av1QvbrSettings) SetQvbrQualityLevel(v int64) *Av1QvbrSettings {
3870	s.QvbrQualityLevel = &v
3871	return s
3872}
3873
3874// SetQvbrQualityLevelFineTune sets the QvbrQualityLevelFineTune field's value.
3875func (s *Av1QvbrSettings) SetQvbrQualityLevelFineTune(v float64) *Av1QvbrSettings {
3876	s.QvbrQualityLevelFineTune = &v
3877	return s
3878}
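
// Example: a sketch of the QVBR quality level 7.33 described above, split into
// the whole-number level and the fine-tune fraction.
//
//    qvbr := &Av1QvbrSettings{}
//    qvbr.SetQvbrQualityLevel(7).SetQvbrQualityLevelFineTune(0.33)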
3879
3880// Required when you set Codec, under VideoDescription>CodecSettings to the
3881// value AV1.
3882type Av1Settings struct {
3883	_ struct{} `type:"structure"`
3884
3885	// Specify the strength of any adaptive quantization filters that you enable.
3886	// The value that you choose here applies to Spatial adaptive quantization (spatialAdaptiveQuantization).
3887	AdaptiveQuantization *string `locationName:"adaptiveQuantization" type:"string" enum:"Av1AdaptiveQuantization"`
3888
3889	// If you are using the console, use the Framerate setting to specify the frame
3890	// rate for this output. If you want to keep the same frame rate as the input
3891	// video, choose Follow source. If you want to do frame rate conversion, choose
3892	// a frame rate from the dropdown list or choose Custom. The framerates shown
3893	// in the dropdown list are decimal approximations of fractions. If you choose
3894	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
3895	// job specification as a JSON file without the console, use FramerateControl
3896	// to specify which value the service uses for the frame rate for this output.
3897	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
3898	// from the input. Choose SPECIFIED if you want the service to use the frame
3899	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
3900	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"Av1FramerateControl"`
3901
3902	// Choose the method that you want MediaConvert to use when increasing or decreasing
3903	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
3904	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
3905	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
3906	// smooth picture, but might introduce undesirable video artifacts. For complex
3907	// frame rate conversions, especially if your source video has already been
3908	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
3909	// motion-compensated interpolation. FrameFormer chooses the best conversion
3910	// method frame by frame. Note that using FrameFormer increases the transcoding
3911	// time and incurs a significant add-on cost.
3912	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"Av1FramerateConversionAlgorithm"`
3913
3914	// When you use the API for transcode jobs that use frame rate conversion, specify
3915	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
3916	// FramerateDenominator to specify the denominator of this fraction. In this
3917	// example, use 1001 for the value of FramerateDenominator. When you use the
3918	// console for transcode jobs that use frame rate conversion, provide the value
3919	// as a decimal number for Framerate. In this example, specify 23.976.
3920	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
3921
3922	// When you use the API for transcode jobs that use frame rate conversion, specify
3923	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
3924	// FramerateNumerator to specify the numerator of this fraction. In this example,
3925	// use 24000 for the value of FramerateNumerator. When you use the console for
3926	// transcode jobs that use frame rate conversion, provide the value as a decimal
3927	// number for Framerate. In this example, specify 23.976.
3928	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
3929
3930	// Specify the GOP length (keyframe interval) in frames. With AV1, MediaConvert
3931	// doesn't support GOP length in seconds. This value must be greater than zero
3932	// and preferably equal to 1 + ((numberBFrames + 1) * x), where x is an integer
3933	// value.
3934	GopSize *float64 `locationName:"gopSize" type:"double"`
3935
3936	// Maximum bitrate in bits/second. For example, enter five megabits per second
3937	// as 5000000. Required when Rate control mode is QVBR.
3938	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
3939
3940	// Specify the number of B-frames. With AV1, MediaConvert supports only 7 or
3941	// 15.
3942	NumberBFramesBetweenReferenceFrames *int64 `locationName:"numberBFramesBetweenReferenceFrames" min:"7" type:"integer"`
3943
3944	// Settings for quality-defined variable bitrate encoding with the AV1 codec.
3945	// Required when you set Rate control mode to QVBR. Not valid when you set Rate
3946	// control mode to a value other than QVBR, or when you don't define Rate control
3947	// mode.
3948	QvbrSettings *Av1QvbrSettings `locationName:"qvbrSettings" type:"structure"`
3949
	// With AV1 outputs, for rate control mode, MediaConvert supports only quality-defined
	// variable bitrate (QVBR). You can't use CBR or VBR.
3952	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"Av1RateControlMode"`
3953
3954	// Specify the number of slices per picture. This value must be 1, 2, 4, 8,
3955	// 16, or 32. For progressive pictures, this value must be less than or equal
3956	// to the number of macroblock rows. For interlaced pictures, this value must
3957	// be less than or equal to half the number of macroblock rows.
3958	Slices *int64 `locationName:"slices" min:"1" type:"integer"`
3959
3960	// Keep the default value, Enabled (ENABLED), to adjust quantization within
3961	// each frame based on spatial variation of content complexity. When you enable
3962	// this feature, the encoder uses fewer bits on areas that can sustain more
3963	// distortion with no noticeable visual degradation and uses more bits on areas
3964	// where any small distortion will be noticeable. For example, complex textured
3965	// blocks are encoded with fewer bits and smooth textured blocks are encoded
3966	// with more bits. Enabling this feature will almost always improve your video
3967	// quality. Note, though, that this feature doesn't take into account where
3968	// the viewer's attention is likely to be. If viewers are likely to be focusing
3969	// their attention on a part of the screen with a lot of complex texture, you
3970	// might choose to disable this feature. Related setting: When you enable spatial
3971	// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
3972	// depending on your content. For homogeneous content, such as cartoons and
3973	// video games, set it to Low. For content with a wider variety of textures,
3974	// set it to High or Higher.
3975	SpatialAdaptiveQuantization *string `locationName:"spatialAdaptiveQuantization" type:"string" enum:"Av1SpatialAdaptiveQuantization"`
3976}
3977
3978// String returns the string representation
3979func (s Av1Settings) String() string {
3980	return awsutil.Prettify(s)
3981}
3982
3983// GoString returns the string representation
3984func (s Av1Settings) GoString() string {
3985	return s.String()
3986}
3987
3988// Validate inspects the fields of the type to determine if they are valid.
3989func (s *Av1Settings) Validate() error {
3990	invalidParams := request.ErrInvalidParams{Context: "Av1Settings"}
3991	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
3992		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
3993	}
3994	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
3995		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
3996	}
3997	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
3998		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
3999	}
4000	if s.NumberBFramesBetweenReferenceFrames != nil && *s.NumberBFramesBetweenReferenceFrames < 7 {
4001		invalidParams.Add(request.NewErrParamMinValue("NumberBFramesBetweenReferenceFrames", 7))
4002	}
4003	if s.Slices != nil && *s.Slices < 1 {
4004		invalidParams.Add(request.NewErrParamMinValue("Slices", 1))
4005	}
4006	if s.QvbrSettings != nil {
4007		if err := s.QvbrSettings.Validate(); err != nil {
4008			invalidParams.AddNested("QvbrSettings", err.(request.ErrInvalidParams))
4009		}
4010	}
4011
4012	if invalidParams.Len() > 0 {
4013		return invalidParams
4014	}
4015	return nil
4016}
4017
4018// SetAdaptiveQuantization sets the AdaptiveQuantization field's value.
4019func (s *Av1Settings) SetAdaptiveQuantization(v string) *Av1Settings {
4020	s.AdaptiveQuantization = &v
4021	return s
4022}
4023
4024// SetFramerateControl sets the FramerateControl field's value.
4025func (s *Av1Settings) SetFramerateControl(v string) *Av1Settings {
4026	s.FramerateControl = &v
4027	return s
4028}
4029
4030// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
4031func (s *Av1Settings) SetFramerateConversionAlgorithm(v string) *Av1Settings {
4032	s.FramerateConversionAlgorithm = &v
4033	return s
4034}
4035
4036// SetFramerateDenominator sets the FramerateDenominator field's value.
4037func (s *Av1Settings) SetFramerateDenominator(v int64) *Av1Settings {
4038	s.FramerateDenominator = &v
4039	return s
4040}
4041
4042// SetFramerateNumerator sets the FramerateNumerator field's value.
4043func (s *Av1Settings) SetFramerateNumerator(v int64) *Av1Settings {
4044	s.FramerateNumerator = &v
4045	return s
4046}
4047
4048// SetGopSize sets the GopSize field's value.
4049func (s *Av1Settings) SetGopSize(v float64) *Av1Settings {
4050	s.GopSize = &v
4051	return s
4052}
4053
4054// SetMaxBitrate sets the MaxBitrate field's value.
4055func (s *Av1Settings) SetMaxBitrate(v int64) *Av1Settings {
4056	s.MaxBitrate = &v
4057	return s
4058}
4059
4060// SetNumberBFramesBetweenReferenceFrames sets the NumberBFramesBetweenReferenceFrames field's value.
4061func (s *Av1Settings) SetNumberBFramesBetweenReferenceFrames(v int64) *Av1Settings {
4062	s.NumberBFramesBetweenReferenceFrames = &v
4063	return s
4064}
4065
4066// SetQvbrSettings sets the QvbrSettings field's value.
4067func (s *Av1Settings) SetQvbrSettings(v *Av1QvbrSettings) *Av1Settings {
4068	s.QvbrSettings = v
4069	return s
4070}
4071
4072// SetRateControlMode sets the RateControlMode field's value.
4073func (s *Av1Settings) SetRateControlMode(v string) *Av1Settings {
4074	s.RateControlMode = &v
4075	return s
4076}
4077
4078// SetSlices sets the Slices field's value.
4079func (s *Av1Settings) SetSlices(v int64) *Av1Settings {
4080	s.Slices = &v
4081	return s
4082}
4083
4084// SetSpatialAdaptiveQuantization sets the SpatialAdaptiveQuantization field's value.
4085func (s *Av1Settings) SetSpatialAdaptiveQuantization(v string) *Av1Settings {
4086	s.SpatialAdaptiveQuantization = &v
4087	return s
4088}
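
// Example: a sketch of AV1 settings with QVBR rate control, a 5 Mb/s cap, and
// the QVBR sketch above, following the field documentation. "QVBR" is the
// assumed Av1RateControlMode enum value.
//
//    av1 := &Av1Settings{}
//    av1.SetRateControlMode("QVBR") // AV1 outputs support only QVBR
//    av1.SetMaxBitrate(5000000)
//    av1.SetNumberBFramesBetweenReferenceFrames(7) // AV1 supports only 7 or 15
//    av1.SetQvbrSettings((&Av1QvbrSettings{}).SetQvbrQualityLevel(7).SetQvbrQualityLevelFineTune(0.33))
//    if err := av1.Validate(); err != nil {
//        fmt.Println(err)
//    }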
4089
4090// Settings for Avail Blanking
4091type AvailBlanking struct {
4092	_ struct{} `type:"structure"`
4093
4094	// Blanking image to be used. Leave empty for solid black. Only bmp and png
4095	// images are supported.
4096	AvailBlankingImage *string `locationName:"availBlankingImage" min:"14" type:"string"`
4097}
4098
4099// String returns the string representation
4100func (s AvailBlanking) String() string {
4101	return awsutil.Prettify(s)
4102}
4103
4104// GoString returns the string representation
4105func (s AvailBlanking) GoString() string {
4106	return s.String()
4107}
4108
4109// Validate inspects the fields of the type to determine if they are valid.
4110func (s *AvailBlanking) Validate() error {
4111	invalidParams := request.ErrInvalidParams{Context: "AvailBlanking"}
4112	if s.AvailBlankingImage != nil && len(*s.AvailBlankingImage) < 14 {
4113		invalidParams.Add(request.NewErrParamMinLen("AvailBlankingImage", 14))
4114	}
4115
4116	if invalidParams.Len() > 0 {
4117		return invalidParams
4118	}
4119	return nil
4120}
4121
4122// SetAvailBlankingImage sets the AvailBlankingImage field's value.
4123func (s *AvailBlanking) SetAvailBlankingImage(v string) *AvailBlanking {
4124	s.AvailBlankingImage = &v
4125	return s
4126}
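
// Example: a sketch of avail blanking that points at a PNG blanking image. The
// S3 URL is a placeholder; only bmp and png images are supported, per the field
// documentation above.
//
//    blanking := &AvailBlanking{}
//    blanking.SetAvailBlankingImage("s3://DOC-EXAMPLE-BUCKET/blanking.png") // placeholder location
//    if err := blanking.Validate(); err != nil {
//        fmt.Println(err)
//    }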
4127
4128// Required when you set your output video codec to AVC-Intra. For more information
4129// about the AVC-I settings, see the relevant specification. For detailed information
4130// about SD and HD in AVC-I, see https://ieeexplore.ieee.org/document/7290936.
4131// For information about 4K/2K in AVC-I, see https://pro-av.panasonic.net/en/avc-ultra/AVC-ULTRAoverview.pdf.
4132type AvcIntraSettings struct {
4133	_ struct{} `type:"structure"`
4134
4135	// Specify the AVC-Intra class of your output. The AVC-Intra class selection
4136	// determines the output video bit rate depending on the frame rate of the output.
4137	// Outputs with higher class values have higher bitrates and improved image
4138	// quality. Note that for Class 4K/2K, MediaConvert supports only 4:2:2 chroma
4139	// subsampling.
4140	AvcIntraClass *string `locationName:"avcIntraClass" type:"string" enum:"AvcIntraClass"`
4141
4142	// Optional when you set AVC-Intra class (avcIntraClass) to Class 4K/2K (CLASS_4K_2K).
4143	// When you set AVC-Intra class to a different value, this object isn't allowed.
4144	AvcIntraUhdSettings *AvcIntraUhdSettings `locationName:"avcIntraUhdSettings" type:"structure"`
4145
4146	// If you are using the console, use the Framerate setting to specify the frame
4147	// rate for this output. If you want to keep the same frame rate as the input
4148	// video, choose Follow source. If you want to do frame rate conversion, choose
4149	// a frame rate from the dropdown list or choose Custom. The framerates shown
4150	// in the dropdown list are decimal approximations of fractions. If you choose
4151	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
4152	// job specification as a JSON file without the console, use FramerateControl
4153	// to specify which value the service uses for the frame rate for this output.
4154	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
4155	// from the input. Choose SPECIFIED if you want the service to use the frame
4156	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
4157	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"AvcIntraFramerateControl"`
4158
4159	// Choose the method that you want MediaConvert to use when increasing or decreasing
4160	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
4161	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
4162	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
4163	// smooth picture, but might introduce undesirable video artifacts. For complex
4164	// frame rate conversions, especially if your source video has already been
4165	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
4166	// motion-compensated interpolation. FrameFormer chooses the best conversion
4167	// method frame by frame. Note that using FrameFormer increases the transcoding
4168	// time and incurs a significant add-on cost.
4169	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"AvcIntraFramerateConversionAlgorithm"`
4170
4171	// When you use the API for transcode jobs that use frame rate conversion, specify
4172	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
4173	// FramerateDenominator to specify the denominator of this fraction. In this
4174	// example, use 1001 for the value of FramerateDenominator. When you use the
4175	// console for transcode jobs that use frame rate conversion, provide the value
4176	// as a decimal number for Framerate. In this example, specify 23.976.
4177	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
4178
4179	// When you use the API for transcode jobs that use frame rate conversion, specify
4180	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
4181	// FramerateNumerator to specify the numerator of this fraction. In this example,
4182	// use 24000 for the value of FramerateNumerator. When you use the console for
4183	// transcode jobs that use frame rate conversion, provide the value as a decimal
4184	// number for Framerate. In this example, specify 23.976.
4185	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"24" type:"integer"`
4186
4187	// Choose the scan line type for the output. Keep the default value, Progressive
4188	// (PROGRESSIVE) to create a progressive output, regardless of the scan type
4189	// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
4190	// to create an output that's interlaced with the same field polarity throughout.
4191	// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
4192	// to produce outputs with the same field polarity as the source. For jobs that
4193	// have multiple inputs, the output field polarity might change over the course
4194	// of the output. Follow behavior depends on the input scan type. If the source
4195	// is interlaced, the output will be interlaced with the same polarity as the
	// source. If the source is progressive, the output will be interlaced with
	// top field first or bottom field first, depending on which of the Follow
	// options you choose.
4199	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"AvcIntraInterlaceMode"`
4200
4201	// Use this setting for interlaced outputs, when your output frame rate is half
4202	// of your input frame rate. In this situation, choose Optimized interlacing
4203	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
4204	// case, each progressive frame from the input corresponds to an interlaced
4205	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
4206	// for all other output frame rates. With basic interlacing, MediaConvert performs
4207	// any frame rate conversion first and then interlaces the frames. When you
4208	// choose Optimized interlacing and you set your output frame rate to a value
4209	// that isn't suitable for optimized interlacing, MediaConvert automatically
4210	// falls back to basic interlacing. Required settings: To use optimized interlacing,
4211	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
4212	// use optimized interlacing for hard telecine outputs. You must also set Interlace
4213	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
4214	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"AvcIntraScanTypeConversionMode"`
4215
4216	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
4217	// second (fps). Enable slow PAL to create a 25 fps output. When you enable
4218	// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
4219	// your audio to keep it synchronized with the video. Note that enabling this
4220	// setting will slightly reduce the duration of your video. Required settings:
4221	// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
4222	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
4223	// 1.
4224	SlowPal *string `locationName:"slowPal" type:"string" enum:"AvcIntraSlowPal"`
4225
4226	// When you do frame rate conversion from 23.976 frames per second (fps) to
4227	// 29.97 fps, and your output scan type is interlaced, you can optionally enable
4228	// hard telecine (HARD) to create a smoother picture. When you keep the default
4229	// value, None (NONE), MediaConvert does a standard frame rate conversion to
	// 29.97 without doing anything with the field polarity.
4232	Telecine *string `locationName:"telecine" type:"string" enum:"AvcIntraTelecine"`
4233}
4234
4235// String returns the string representation
4236func (s AvcIntraSettings) String() string {
4237	return awsutil.Prettify(s)
4238}
4239
4240// GoString returns the string representation
4241func (s AvcIntraSettings) GoString() string {
4242	return s.String()
4243}
4244
4245// Validate inspects the fields of the type to determine if they are valid.
4246func (s *AvcIntraSettings) Validate() error {
4247	invalidParams := request.ErrInvalidParams{Context: "AvcIntraSettings"}
4248	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
4249		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
4250	}
4251	if s.FramerateNumerator != nil && *s.FramerateNumerator < 24 {
4252		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 24))
4253	}
4254
4255	if invalidParams.Len() > 0 {
4256		return invalidParams
4257	}
4258	return nil
4259}
4260
4261// SetAvcIntraClass sets the AvcIntraClass field's value.
4262func (s *AvcIntraSettings) SetAvcIntraClass(v string) *AvcIntraSettings {
4263	s.AvcIntraClass = &v
4264	return s
4265}
4266
4267// SetAvcIntraUhdSettings sets the AvcIntraUhdSettings field's value.
4268func (s *AvcIntraSettings) SetAvcIntraUhdSettings(v *AvcIntraUhdSettings) *AvcIntraSettings {
4269	s.AvcIntraUhdSettings = v
4270	return s
4271}
4272
4273// SetFramerateControl sets the FramerateControl field's value.
4274func (s *AvcIntraSettings) SetFramerateControl(v string) *AvcIntraSettings {
4275	s.FramerateControl = &v
4276	return s
4277}
4278
4279// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
4280func (s *AvcIntraSettings) SetFramerateConversionAlgorithm(v string) *AvcIntraSettings {
4281	s.FramerateConversionAlgorithm = &v
4282	return s
4283}
4284
4285// SetFramerateDenominator sets the FramerateDenominator field's value.
4286func (s *AvcIntraSettings) SetFramerateDenominator(v int64) *AvcIntraSettings {
4287	s.FramerateDenominator = &v
4288	return s
4289}
4290
4291// SetFramerateNumerator sets the FramerateNumerator field's value.
4292func (s *AvcIntraSettings) SetFramerateNumerator(v int64) *AvcIntraSettings {
4293	s.FramerateNumerator = &v
4294	return s
4295}
4296
4297// SetInterlaceMode sets the InterlaceMode field's value.
4298func (s *AvcIntraSettings) SetInterlaceMode(v string) *AvcIntraSettings {
4299	s.InterlaceMode = &v
4300	return s
4301}
4302
4303// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
4304func (s *AvcIntraSettings) SetScanTypeConversionMode(v string) *AvcIntraSettings {
4305	s.ScanTypeConversionMode = &v
4306	return s
4307}
4308
4309// SetSlowPal sets the SlowPal field's value.
4310func (s *AvcIntraSettings) SetSlowPal(v string) *AvcIntraSettings {
4311	s.SlowPal = &v
4312	return s
4313}
4314
4315// SetTelecine sets the Telecine field's value.
4316func (s *AvcIntraSettings) SetTelecine(v string) *AvcIntraSettings {
4317	s.Telecine = &v
4318	return s
4319}
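
// Example (illustrative sketch, not generated documentation): configuring an
// AVC-Intra output for 23.976 fps using the fraction-based frame rate fields
// described above (24000 / 1001 = 23.976). "SPECIFIED" comes from the
// FramerateControl documentation; the class string is an assumed enum value.
//
//    avcIntra := &AvcIntraSettings{}
//    avcIntra.SetAvcIntraClass("CLASS_100") // assumed enum value
//    avcIntra.SetFramerateControl("SPECIFIED")
//    avcIntra.SetFramerateNumerator(24000)
//    avcIntra.SetFramerateDenominator(1001)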
4320
4321// Optional when you set AVC-Intra class (avcIntraClass) to Class 4K/2K (CLASS_4K_2K).
4322// When you set AVC-Intra class to a different value, this object isn't allowed.
4323type AvcIntraUhdSettings struct {
4324	_ struct{} `type:"structure"`
4325
4326	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how many
4327	// transcoding passes MediaConvert does with your video. When you choose Multi-pass
4328	// (MULTI_PASS), your video quality is better and your output bitrate is more
4329	// accurate. That is, the actual bitrate of your output is closer to the target
4330	// bitrate defined in the specification. When you choose Single-pass (SINGLE_PASS),
4331	// your encoding time is faster. The default behavior is Single-pass (SINGLE_PASS).
4332	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"AvcIntraUhdQualityTuningLevel"`
4333}
4334
4335// String returns the string representation
4336func (s AvcIntraUhdSettings) String() string {
4337	return awsutil.Prettify(s)
4338}
4339
4340// GoString returns the string representation
4341func (s AvcIntraUhdSettings) GoString() string {
4342	return s.String()
4343}
4344
4345// SetQualityTuningLevel sets the QualityTuningLevel field's value.
4346func (s *AvcIntraUhdSettings) SetQualityTuningLevel(v string) *AvcIntraUhdSettings {
4347	s.QualityTuningLevel = &v
4348	return s
4349}
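
// Example (illustrative sketch): pairing AvcIntraUhdSettings with the Class
// 4K/2K setting, as described above. CLASS_4K_2K and MULTI_PASS are taken from
// the field documentation; multi-pass trades encoding speed for quality and
// bitrate accuracy.
//
//    uhd := &AvcIntraUhdSettings{}
//    uhd.SetQualityTuningLevel("MULTI_PASS")
//
//    avcIntra := &AvcIntraSettings{}
//    avcIntra.SetAvcIntraClass("CLASS_4K_2K")
//    avcIntra.SetAvcIntraUhdSettings(uhd)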
4350
4351type BadRequestException struct {
4352	_            struct{}                  `type:"structure"`
4353	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
4354
4355	Message_ *string `locationName:"message" type:"string"`
4356}
4357
4358// String returns the string representation
4359func (s BadRequestException) String() string {
4360	return awsutil.Prettify(s)
4361}
4362
4363// GoString returns the string representation
4364func (s BadRequestException) GoString() string {
4365	return s.String()
4366}
4367
4368func newErrorBadRequestException(v protocol.ResponseMetadata) error {
4369	return &BadRequestException{
4370		RespMetadata: v,
4371	}
4372}
4373
4374// Code returns the exception type name.
4375func (s *BadRequestException) Code() string {
4376	return "BadRequestException"
4377}
4378
4379// Message returns the exception's message.
4380func (s *BadRequestException) Message() string {
4381	if s.Message_ != nil {
4382		return *s.Message_
4383	}
4384	return ""
4385}
4386
4387// OrigErr always returns nil, satisfies awserr.Error interface.
4388func (s *BadRequestException) OrigErr() error {
4389	return nil
4390}
4391
4392func (s *BadRequestException) Error() string {
4393	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
4394}
4395
// StatusCode returns the HTTP status code for the request's response error.
4397func (s *BadRequestException) StatusCode() int {
4398	return s.RespMetadata.StatusCode
4399}
4400
4401// RequestID returns the service's response RequestID for request.
4402func (s *BadRequestException) RequestID() string {
4403	return s.RespMetadata.RequestID
4404}
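
// Example (illustrative sketch): callers outside this package can detect a
// BadRequestException with a type assertion on the returned error, then use
// the Code, Message, and StatusCode accessors defined above. The svc and input
// values are assumed to exist in the caller's code.
//
//    _, err := svc.CancelJob(input)
//    if err != nil {
//        if badReq, ok := err.(*mediaconvert.BadRequestException); ok {
//            fmt.Println(badReq.Code(), badReq.Message(), badReq.StatusCode())
//        }
//    }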
4405
4406// Burn-In Destination Settings.
4407type BurninDestinationSettings struct {
4408	_ struct{} `type:"structure"`
4409
4410	// If no explicit x_position or y_position is provided, setting alignment to
4411	// centered will place the captions at the bottom center of the output. Similarly,
4412	// setting a left alignment will align captions to the bottom left of the output.
4413	// If x and y positions are given in conjunction with the alignment parameter,
4414	// the font will be justified (either left or centered) relative to those coordinates.
4415	// This option is not valid for source captions that are STL, 608/embedded or
4416	// teletext. These source settings are already pre-defined by the caption stream.
4417	// All burn-in and DVB-Sub font settings must match.
4418	Alignment *string `locationName:"alignment" type:"string" enum:"BurninSubtitleAlignment"`
4419
	// Specifies the color of the rectangle behind the captions. All burn-in and
	// DVB-Sub font settings must match.
4422	BackgroundColor *string `locationName:"backgroundColor" type:"string" enum:"BurninSubtitleBackgroundColor"`
4423
4424	// Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent.
4425	// Leaving this parameter blank is equivalent to setting it to 0 (transparent).
4426	// All burn-in and DVB-Sub font settings must match.
4427	BackgroundOpacity *int64 `locationName:"backgroundOpacity" type:"integer"`
4428
4429	// Specifies the color of the burned-in captions. This option is not valid for
4430	// source captions that are STL, 608/embedded or teletext. These source settings
4431	// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
4432	// settings must match.
4433	FontColor *string `locationName:"fontColor" type:"string" enum:"BurninSubtitleFontColor"`
4434
	// Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent.
	// All burn-in and DVB-Sub font settings must match.
4437	FontOpacity *int64 `locationName:"fontOpacity" type:"integer"`
4438
	// Font resolution in DPI (dots per inch); default is 96 DPI. All burn-in and
	// DVB-Sub font settings must match.
4441	FontResolution *int64 `locationName:"fontResolution" min:"96" type:"integer"`
4442
4443	// Provide the font script, using an ISO 15924 script code, if the LanguageCode
4444	// is not sufficient for determining the script type. Where LanguageCode or
4445	// CustomLanguageCode is sufficient, use "AUTOMATIC" or leave unset. This is
4446	// used to help determine the appropriate font for rendering burn-in captions.
4447	FontScript *string `locationName:"fontScript" type:"string" enum:"FontScript"`
4448
4449	// A positive integer indicates the exact font size in points. Set to 0 for
4450	// automatic font size selection. All burn-in and DVB-Sub font settings must
4451	// match.
4452	FontSize *int64 `locationName:"fontSize" type:"integer"`
4453
4454	// Specifies font outline color. This option is not valid for source captions
4455	// that are either 608/embedded or teletext. These source settings are already
4456	// pre-defined by the caption stream. All burn-in and DVB-Sub font settings
4457	// must match.
4458	OutlineColor *string `locationName:"outlineColor" type:"string" enum:"BurninSubtitleOutlineColor"`
4459
4460	// Specifies font outline size in pixels. This option is not valid for source
4461	// captions that are either 608/embedded or teletext. These source settings
4462	// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
4463	// settings must match.
4464	OutlineSize *int64 `locationName:"outlineSize" type:"integer"`
4465
	// Specifies the color of the shadow cast by the captions. All burn-in and
	// DVB-Sub font settings must match.
4468	ShadowColor *string `locationName:"shadowColor" type:"string" enum:"BurninSubtitleShadowColor"`
4469
4470	// Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving
4471	// this parameter blank is equivalent to setting it to 0 (transparent). All
4472	// burn-in and DVB-Sub font settings must match.
4473	ShadowOpacity *int64 `locationName:"shadowOpacity" type:"integer"`
4474
4475	// Specifies the horizontal offset of the shadow relative to the captions in
4476	// pixels. A value of -2 would result in a shadow offset 2 pixels to the left.
4477	// All burn-in and DVB-Sub font settings must match.
4478	ShadowXOffset *int64 `locationName:"shadowXOffset" type:"integer"`
4479
4480	// Specifies the vertical offset of the shadow relative to the captions in pixels.
4481	// A value of -2 would result in a shadow offset 2 pixels above the text. All
4482	// burn-in and DVB-Sub font settings must match.
4483	ShadowYOffset *int64 `locationName:"shadowYOffset" type:"integer"`
4484
4485	// Only applies to jobs with input captions in Teletext or STL formats. Specify
4486	// whether the spacing between letters in your captions is set by the captions
4487	// grid or varies depending on letter width. Choose fixed grid to conform to
4488	// the spacing specified in the captions file more accurately. Choose proportional
4489	// to make the text easier to read if the captions are closed caption.
4490	TeletextSpacing *string `locationName:"teletextSpacing" type:"string" enum:"BurninSubtitleTeletextSpacing"`
4491
4492	// Specifies the horizontal position of the caption relative to the left side
4493	// of the output in pixels. A value of 10 would result in the captions starting
4494	// 10 pixels from the left of the output. If no explicit x_position is provided,
4495	// the horizontal caption position will be determined by the alignment parameter.
4496	// This option is not valid for source captions that are STL, 608/embedded or
4497	// teletext. These source settings are already pre-defined by the caption stream.
4498	// All burn-in and DVB-Sub font settings must match.
4499	XPosition *int64 `locationName:"xPosition" type:"integer"`
4500
4501	// Specifies the vertical position of the caption relative to the top of the
4502	// output in pixels. A value of 10 would result in the captions starting 10
4503	// pixels from the top of the output. If no explicit y_position is provided,
4504	// the caption will be positioned towards the bottom of the output. This option
4505	// is not valid for source captions that are STL, 608/embedded or teletext.
4506	// These source settings are already pre-defined by the caption stream. All
4507	// burn-in and DVB-Sub font settings must match.
4508	YPosition *int64 `locationName:"yPosition" type:"integer"`
4509}
4510
4511// String returns the string representation
4512func (s BurninDestinationSettings) String() string {
4513	return awsutil.Prettify(s)
4514}
4515
4516// GoString returns the string representation
4517func (s BurninDestinationSettings) GoString() string {
4518	return s.String()
4519}
4520
4521// Validate inspects the fields of the type to determine if they are valid.
4522func (s *BurninDestinationSettings) Validate() error {
4523	invalidParams := request.ErrInvalidParams{Context: "BurninDestinationSettings"}
4524	if s.FontResolution != nil && *s.FontResolution < 96 {
4525		invalidParams.Add(request.NewErrParamMinValue("FontResolution", 96))
4526	}
4527	if s.ShadowXOffset != nil && *s.ShadowXOffset < -2.147483648e+09 {
4528		invalidParams.Add(request.NewErrParamMinValue("ShadowXOffset", -2.147483648e+09))
4529	}
4530	if s.ShadowYOffset != nil && *s.ShadowYOffset < -2.147483648e+09 {
4531		invalidParams.Add(request.NewErrParamMinValue("ShadowYOffset", -2.147483648e+09))
4532	}
4533
4534	if invalidParams.Len() > 0 {
4535		return invalidParams
4536	}
4537	return nil
4538}
4539
4540// SetAlignment sets the Alignment field's value.
4541func (s *BurninDestinationSettings) SetAlignment(v string) *BurninDestinationSettings {
4542	s.Alignment = &v
4543	return s
4544}
4545
4546// SetBackgroundColor sets the BackgroundColor field's value.
4547func (s *BurninDestinationSettings) SetBackgroundColor(v string) *BurninDestinationSettings {
4548	s.BackgroundColor = &v
4549	return s
4550}
4551
4552// SetBackgroundOpacity sets the BackgroundOpacity field's value.
4553func (s *BurninDestinationSettings) SetBackgroundOpacity(v int64) *BurninDestinationSettings {
4554	s.BackgroundOpacity = &v
4555	return s
4556}
4557
4558// SetFontColor sets the FontColor field's value.
4559func (s *BurninDestinationSettings) SetFontColor(v string) *BurninDestinationSettings {
4560	s.FontColor = &v
4561	return s
4562}
4563
4564// SetFontOpacity sets the FontOpacity field's value.
4565func (s *BurninDestinationSettings) SetFontOpacity(v int64) *BurninDestinationSettings {
4566	s.FontOpacity = &v
4567	return s
4568}
4569
4570// SetFontResolution sets the FontResolution field's value.
4571func (s *BurninDestinationSettings) SetFontResolution(v int64) *BurninDestinationSettings {
4572	s.FontResolution = &v
4573	return s
4574}
4575
4576// SetFontScript sets the FontScript field's value.
4577func (s *BurninDestinationSettings) SetFontScript(v string) *BurninDestinationSettings {
4578	s.FontScript = &v
4579	return s
4580}
4581
4582// SetFontSize sets the FontSize field's value.
4583func (s *BurninDestinationSettings) SetFontSize(v int64) *BurninDestinationSettings {
4584	s.FontSize = &v
4585	return s
4586}
4587
4588// SetOutlineColor sets the OutlineColor field's value.
4589func (s *BurninDestinationSettings) SetOutlineColor(v string) *BurninDestinationSettings {
4590	s.OutlineColor = &v
4591	return s
4592}
4593
4594// SetOutlineSize sets the OutlineSize field's value.
4595func (s *BurninDestinationSettings) SetOutlineSize(v int64) *BurninDestinationSettings {
4596	s.OutlineSize = &v
4597	return s
4598}
4599
4600// SetShadowColor sets the ShadowColor field's value.
4601func (s *BurninDestinationSettings) SetShadowColor(v string) *BurninDestinationSettings {
4602	s.ShadowColor = &v
4603	return s
4604}
4605
4606// SetShadowOpacity sets the ShadowOpacity field's value.
4607func (s *BurninDestinationSettings) SetShadowOpacity(v int64) *BurninDestinationSettings {
4608	s.ShadowOpacity = &v
4609	return s
4610}
4611
4612// SetShadowXOffset sets the ShadowXOffset field's value.
4613func (s *BurninDestinationSettings) SetShadowXOffset(v int64) *BurninDestinationSettings {
4614	s.ShadowXOffset = &v
4615	return s
4616}
4617
4618// SetShadowYOffset sets the ShadowYOffset field's value.
4619func (s *BurninDestinationSettings) SetShadowYOffset(v int64) *BurninDestinationSettings {
4620	s.ShadowYOffset = &v
4621	return s
4622}
4623
4624// SetTeletextSpacing sets the TeletextSpacing field's value.
4625func (s *BurninDestinationSettings) SetTeletextSpacing(v string) *BurninDestinationSettings {
4626	s.TeletextSpacing = &v
4627	return s
4628}
4629
4630// SetXPosition sets the XPosition field's value.
4631func (s *BurninDestinationSettings) SetXPosition(v int64) *BurninDestinationSettings {
4632	s.XPosition = &v
4633	return s
4634}
4635
4636// SetYPosition sets the YPosition field's value.
4637func (s *BurninDestinationSettings) SetYPosition(v int64) *BurninDestinationSettings {
4638	s.YPosition = &v
4639	return s
4640}
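
// Example (illustrative sketch): positioning burned-in captions using the
// fields described above. The numeric values come from the field documentation
// (255 = opaque, offsets and positions in pixels); the "CENTERED" alignment
// string is an assumed enum value.
//
//    burnin := &BurninDestinationSettings{}
//    burnin.SetAlignment("CENTERED") // assumed enum value
//    burnin.SetFontOpacity(255)      // fully opaque text
//    burnin.SetShadowXOffset(-2)     // shadow 2 pixels to the left
//    burnin.SetXPosition(10)         // 10 pixels from the left edge
//    burnin.SetYPosition(10)         // 10 pixels from the top edge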
4641
4642// Cancel a job by sending a request with the job ID
4643type CancelJobInput struct {
4644	_ struct{} `type:"structure"`
4645
4646	// The Job ID of the job to be cancelled.
4647	//
4648	// Id is a required field
4649	Id *string `location:"uri" locationName:"id" type:"string" required:"true"`
4650}
4651
4652// String returns the string representation
4653func (s CancelJobInput) String() string {
4654	return awsutil.Prettify(s)
4655}
4656
4657// GoString returns the string representation
4658func (s CancelJobInput) GoString() string {
4659	return s.String()
4660}
4661
4662// Validate inspects the fields of the type to determine if they are valid.
4663func (s *CancelJobInput) Validate() error {
4664	invalidParams := request.ErrInvalidParams{Context: "CancelJobInput"}
4665	if s.Id == nil {
4666		invalidParams.Add(request.NewErrParamRequired("Id"))
4667	}
4668	if s.Id != nil && len(*s.Id) < 1 {
4669		invalidParams.Add(request.NewErrParamMinLen("Id", 1))
4670	}
4671
4672	if invalidParams.Len() > 0 {
4673		return invalidParams
4674	}
4675	return nil
4676}
4677
4678// SetId sets the Id field's value.
4679func (s *CancelJobInput) SetId(v string) *CancelJobInput {
4680	s.Id = &v
4681	return s
4682}
4683
4684// A cancel job request will receive a response with an empty body.
4685type CancelJobOutput struct {
4686	_ struct{} `type:"structure"`
4687}
4688
4689// String returns the string representation
4690func (s CancelJobOutput) String() string {
4691	return awsutil.Prettify(s)
4692}
4693
4694// GoString returns the string representation
4695func (s CancelJobOutput) GoString() string {
4696	return s.String()
4697}
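
// Example (illustrative sketch): cancelling a job by ID with the client's
// CancelJob operation. The session value and the job ID below are placeholders.
//
//    svc := mediaconvert.New(sess) // sess is an assumed *session.Session
//    _, err := svc.CancelJob(&mediaconvert.CancelJobInput{
//        Id: aws.String("1234567890123-abcdef"), // placeholder job ID
//    })
//    if err != nil {
//        fmt.Println(err)
//    }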
4698
4699// Description of Caption output
4700type CaptionDescription struct {
4701	_ struct{} `type:"structure"`
4702
	// Specifies which "Caption Selector" to use from each input when generating
	// captions. The name should be of the format "Caption Selector <N>", which
	// denotes that the Nth Caption Selector will be used from each input.
4707	CaptionSelectorName *string `locationName:"captionSelectorName" min:"1" type:"string"`
4708
4709	// Specify the language for this captions output track. For most captions output
4710	// formats, the encoder puts this language information in the output captions
4711	// metadata. If your output captions format is DVB-Sub or Burn in, the encoder
4712	// uses this language information when automatically selecting the font script
4713	// for rendering the captions text. For all outputs, you can use an ISO 639-2
4714	// or ISO 639-3 code. For streaming outputs, you can also use any other code
4715	// in the full RFC-5646 specification. Streaming outputs are those that are
4716	// in one of the following output groups: CMAF, DASH ISO, Apple HLS, or Microsoft
4717	// Smooth Streaming.
4718	CustomLanguageCode *string `locationName:"customLanguageCode" type:"string"`
4719
4720	// Specific settings required by destination type. Note that burnin_destination_settings
4721	// are not available if the source of the caption data is Embedded or Teletext.
4722	DestinationSettings *CaptionDestinationSettings `locationName:"destinationSettings" type:"structure"`
4723
4724	// Specify the language of this captions output track. For most captions output
4725	// formats, the encoder puts this language information in the output captions
4726	// metadata. If your output captions format is DVB-Sub or Burn in, the encoder
4727	// uses this language information to choose the font language for rendering
4728	// the captions text.
4729	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
4730
4731	// Specify a label for this set of output captions. For example, "English",
4732	// "Director commentary", or "track_2". For streaming outputs, MediaConvert
4733	// passes this information into destination manifests for display on the end-viewer's
4734	// player device. For outputs in other output groups, the service ignores this
4735	// setting.
4736	LanguageDescription *string `locationName:"languageDescription" type:"string"`
4737}
4738
4739// String returns the string representation
4740func (s CaptionDescription) String() string {
4741	return awsutil.Prettify(s)
4742}
4743
4744// GoString returns the string representation
4745func (s CaptionDescription) GoString() string {
4746	return s.String()
4747}
4748
4749// Validate inspects the fields of the type to determine if they are valid.
4750func (s *CaptionDescription) Validate() error {
4751	invalidParams := request.ErrInvalidParams{Context: "CaptionDescription"}
4752	if s.CaptionSelectorName != nil && len(*s.CaptionSelectorName) < 1 {
4753		invalidParams.Add(request.NewErrParamMinLen("CaptionSelectorName", 1))
4754	}
4755	if s.DestinationSettings != nil {
4756		if err := s.DestinationSettings.Validate(); err != nil {
4757			invalidParams.AddNested("DestinationSettings", err.(request.ErrInvalidParams))
4758		}
4759	}
4760
4761	if invalidParams.Len() > 0 {
4762		return invalidParams
4763	}
4764	return nil
4765}
4766
4767// SetCaptionSelectorName sets the CaptionSelectorName field's value.
4768func (s *CaptionDescription) SetCaptionSelectorName(v string) *CaptionDescription {
4769	s.CaptionSelectorName = &v
4770	return s
4771}
4772
4773// SetCustomLanguageCode sets the CustomLanguageCode field's value.
4774func (s *CaptionDescription) SetCustomLanguageCode(v string) *CaptionDescription {
4775	s.CustomLanguageCode = &v
4776	return s
4777}
4778
4779// SetDestinationSettings sets the DestinationSettings field's value.
4780func (s *CaptionDescription) SetDestinationSettings(v *CaptionDestinationSettings) *CaptionDescription {
4781	s.DestinationSettings = v
4782	return s
4783}
4784
4785// SetLanguageCode sets the LanguageCode field's value.
4786func (s *CaptionDescription) SetLanguageCode(v string) *CaptionDescription {
4787	s.LanguageCode = &v
4788	return s
4789}
4790
4791// SetLanguageDescription sets the LanguageDescription field's value.
4792func (s *CaptionDescription) SetLanguageDescription(v string) *CaptionDescription {
4793	s.LanguageDescription = &v
4794	return s
4795}
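
// Example (illustrative sketch): a captions output track that pulls from the
// first caption selector and labels the track for manifests. The selector name
// format and the label come from the field documentation above; "eng" is an
// ISO 639-2 code.
//
//    desc := &CaptionDescription{}
//    desc.SetCaptionSelectorName("Caption Selector 1")
//    desc.SetCustomLanguageCode("eng")
//    desc.SetLanguageDescription("English")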
4796
4797// Caption Description for preset
4798type CaptionDescriptionPreset struct {
4799	_ struct{} `type:"structure"`
4800
4801	// Specify the language for this captions output track. For most captions output
4802	// formats, the encoder puts this language information in the output captions
4803	// metadata. If your output captions format is DVB-Sub or Burn in, the encoder
4804	// uses this language information when automatically selecting the font script
4805	// for rendering the captions text. For all outputs, you can use an ISO 639-2
4806	// or ISO 639-3 code. For streaming outputs, you can also use any other code
4807	// in the full RFC-5646 specification. Streaming outputs are those that are
4808	// in one of the following output groups: CMAF, DASH ISO, Apple HLS, or Microsoft
4809	// Smooth Streaming.
4810	CustomLanguageCode *string `locationName:"customLanguageCode" type:"string"`
4811
4812	// Specific settings required by destination type. Note that burnin_destination_settings
4813	// are not available if the source of the caption data is Embedded or Teletext.
4814	DestinationSettings *CaptionDestinationSettings `locationName:"destinationSettings" type:"structure"`
4815
4816	// Specify the language of this captions output track. For most captions output
4817	// formats, the encoder puts this language information in the output captions
4818	// metadata. If your output captions format is DVB-Sub or Burn in, the encoder
4819	// uses this language information to choose the font language for rendering
4820	// the captions text.
4821	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
4822
4823	// Specify a label for this set of output captions. For example, "English",
4824	// "Director commentary", or "track_2". For streaming outputs, MediaConvert
4825	// passes this information into destination manifests for display on the end-viewer's
4826	// player device. For outputs in other output groups, the service ignores this
4827	// setting.
4828	LanguageDescription *string `locationName:"languageDescription" type:"string"`
4829}
4830
4831// String returns the string representation
4832func (s CaptionDescriptionPreset) String() string {
4833	return awsutil.Prettify(s)
4834}
4835
4836// GoString returns the string representation
4837func (s CaptionDescriptionPreset) GoString() string {
4838	return s.String()
4839}
4840
4841// Validate inspects the fields of the type to determine if they are valid.
4842func (s *CaptionDescriptionPreset) Validate() error {
4843	invalidParams := request.ErrInvalidParams{Context: "CaptionDescriptionPreset"}
4844	if s.DestinationSettings != nil {
4845		if err := s.DestinationSettings.Validate(); err != nil {
4846			invalidParams.AddNested("DestinationSettings", err.(request.ErrInvalidParams))
4847		}
4848	}
4849
4850	if invalidParams.Len() > 0 {
4851		return invalidParams
4852	}
4853	return nil
4854}
4855
4856// SetCustomLanguageCode sets the CustomLanguageCode field's value.
4857func (s *CaptionDescriptionPreset) SetCustomLanguageCode(v string) *CaptionDescriptionPreset {
4858	s.CustomLanguageCode = &v
4859	return s
4860}
4861
4862// SetDestinationSettings sets the DestinationSettings field's value.
4863func (s *CaptionDescriptionPreset) SetDestinationSettings(v *CaptionDestinationSettings) *CaptionDescriptionPreset {
4864	s.DestinationSettings = v
4865	return s
4866}
4867
4868// SetLanguageCode sets the LanguageCode field's value.
4869func (s *CaptionDescriptionPreset) SetLanguageCode(v string) *CaptionDescriptionPreset {
4870	s.LanguageCode = &v
4871	return s
4872}
4873
4874// SetLanguageDescription sets the LanguageDescription field's value.
4875func (s *CaptionDescriptionPreset) SetLanguageDescription(v string) *CaptionDescriptionPreset {
4876	s.LanguageDescription = &v
4877	return s
4878}
4879
4880// Specific settings required by destination type. Note that burnin_destination_settings
4881// are not available if the source of the caption data is Embedded or Teletext.
4882type CaptionDestinationSettings struct {
4883	_ struct{} `type:"structure"`
4884
4885	// Burn-In Destination Settings.
4886	BurninDestinationSettings *BurninDestinationSettings `locationName:"burninDestinationSettings" type:"structure"`
4887
4888	// Specify the format for this set of captions on this output. The default format
4889	// is embedded without SCTE-20. Other options are embedded with SCTE-20, burn-in,
4890	// DVB-sub, IMSC, SCC, SRT, teletext, TTML, and web-VTT. If you are using SCTE-20,
4891	// choose SCTE-20 plus embedded (SCTE20_PLUS_EMBEDDED) to create an output that
4892	// complies with the SCTE-43 spec. To create a non-compliant output where the
4893	// embedded captions come first, choose Embedded plus SCTE-20 (EMBEDDED_PLUS_SCTE20).
4894	DestinationType *string `locationName:"destinationType" type:"string" enum:"CaptionDestinationType"`
4895
4896	// DVB-Sub Destination Settings
4897	DvbSubDestinationSettings *DvbSubDestinationSettings `locationName:"dvbSubDestinationSettings" type:"structure"`
4898
4899	// Settings specific to embedded/ancillary caption outputs, including 608/708
4900	// Channel destination number.
4901	EmbeddedDestinationSettings *EmbeddedDestinationSettings `locationName:"embeddedDestinationSettings" type:"structure"`
4902
4903	// Settings specific to IMSC caption outputs.
4904	ImscDestinationSettings *ImscDestinationSettings `locationName:"imscDestinationSettings" type:"structure"`
4905
4906	// Settings for SCC caption output.
4907	SccDestinationSettings *SccDestinationSettings `locationName:"sccDestinationSettings" type:"structure"`
4908
4909	// Settings for Teletext caption output
4910	TeletextDestinationSettings *TeletextDestinationSettings `locationName:"teletextDestinationSettings" type:"structure"`
4911
4912	// Settings specific to TTML caption outputs, including Pass style information
4913	// (TtmlStylePassthrough).
4914	TtmlDestinationSettings *TtmlDestinationSettings `locationName:"ttmlDestinationSettings" type:"structure"`
4915}
4916
4917// String returns the string representation
4918func (s CaptionDestinationSettings) String() string {
4919	return awsutil.Prettify(s)
4920}
4921
4922// GoString returns the string representation
4923func (s CaptionDestinationSettings) GoString() string {
4924	return s.String()
4925}
4926
4927// Validate inspects the fields of the type to determine if they are valid.
4928func (s *CaptionDestinationSettings) Validate() error {
4929	invalidParams := request.ErrInvalidParams{Context: "CaptionDestinationSettings"}
4930	if s.BurninDestinationSettings != nil {
4931		if err := s.BurninDestinationSettings.Validate(); err != nil {
4932			invalidParams.AddNested("BurninDestinationSettings", err.(request.ErrInvalidParams))
4933		}
4934	}
4935	if s.DvbSubDestinationSettings != nil {
4936		if err := s.DvbSubDestinationSettings.Validate(); err != nil {
4937			invalidParams.AddNested("DvbSubDestinationSettings", err.(request.ErrInvalidParams))
4938		}
4939	}
4940	if s.EmbeddedDestinationSettings != nil {
4941		if err := s.EmbeddedDestinationSettings.Validate(); err != nil {
4942			invalidParams.AddNested("EmbeddedDestinationSettings", err.(request.ErrInvalidParams))
4943		}
4944	}
4945	if s.TeletextDestinationSettings != nil {
4946		if err := s.TeletextDestinationSettings.Validate(); err != nil {
4947			invalidParams.AddNested("TeletextDestinationSettings", err.(request.ErrInvalidParams))
4948		}
4949	}
4950
4951	if invalidParams.Len() > 0 {
4952		return invalidParams
4953	}
4954	return nil
4955}
4956
4957// SetBurninDestinationSettings sets the BurninDestinationSettings field's value.
4958func (s *CaptionDestinationSettings) SetBurninDestinationSettings(v *BurninDestinationSettings) *CaptionDestinationSettings {
4959	s.BurninDestinationSettings = v
4960	return s
4961}
4962
4963// SetDestinationType sets the DestinationType field's value.
4964func (s *CaptionDestinationSettings) SetDestinationType(v string) *CaptionDestinationSettings {
4965	s.DestinationType = &v
4966	return s
4967}
4968
4969// SetDvbSubDestinationSettings sets the DvbSubDestinationSettings field's value.
4970func (s *CaptionDestinationSettings) SetDvbSubDestinationSettings(v *DvbSubDestinationSettings) *CaptionDestinationSettings {
4971	s.DvbSubDestinationSettings = v
4972	return s
4973}
4974
4975// SetEmbeddedDestinationSettings sets the EmbeddedDestinationSettings field's value.
4976func (s *CaptionDestinationSettings) SetEmbeddedDestinationSettings(v *EmbeddedDestinationSettings) *CaptionDestinationSettings {
4977	s.EmbeddedDestinationSettings = v
4978	return s
4979}
4980
4981// SetImscDestinationSettings sets the ImscDestinationSettings field's value.
4982func (s *CaptionDestinationSettings) SetImscDestinationSettings(v *ImscDestinationSettings) *CaptionDestinationSettings {
4983	s.ImscDestinationSettings = v
4984	return s
4985}
4986
4987// SetSccDestinationSettings sets the SccDestinationSettings field's value.
4988func (s *CaptionDestinationSettings) SetSccDestinationSettings(v *SccDestinationSettings) *CaptionDestinationSettings {
4989	s.SccDestinationSettings = v
4990	return s
4991}
4992
4993// SetTeletextDestinationSettings sets the TeletextDestinationSettings field's value.
4994func (s *CaptionDestinationSettings) SetTeletextDestinationSettings(v *TeletextDestinationSettings) *CaptionDestinationSettings {
4995	s.TeletextDestinationSettings = v
4996	return s
4997}
4998
4999// SetTtmlDestinationSettings sets the TtmlDestinationSettings field's value.
5000func (s *CaptionDestinationSettings) SetTtmlDestinationSettings(v *TtmlDestinationSettings) *CaptionDestinationSettings {
5001	s.TtmlDestinationSettings = v
5002	return s
5003}
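
// Example (illustrative sketch): selecting burn-in as the captions destination
// and attaching the burn-in settings described earlier. "BURN_IN" is an assumed
// enum value for CaptionDestinationType.
//
//    dest := &CaptionDestinationSettings{}
//    dest.SetDestinationType("BURN_IN") // assumed enum value
//    dest.SetBurninDestinationSettings(&BurninDestinationSettings{
//        FontOpacity: aws.Int64(255),
//    })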
5004
5005// Set up captions in your outputs by first selecting them from your input here.
5006type CaptionSelector struct {
5007	_ struct{} `type:"structure"`
5008
5009	// The specific language to extract from source, using the ISO 639-2 or ISO
5010	// 639-3 three-letter language code. If input is SCTE-27, complete this field
5011	// and/or PID to select the caption language to extract. If input is DVB-Sub
5012	// and output is Burn-in or SMPTE-TT, complete this field and/or PID to select
5013	// the caption language to extract. If input is DVB-Sub that is being passed
5014	// through, omit this field (and PID field); there is no way to extract a specific
5015	// language with pass-through captions.
5016	CustomLanguageCode *string `locationName:"customLanguageCode" min:"3" type:"string"`
5017
5018	// The specific language to extract from source. If input is SCTE-27, complete
5019	// this field and/or PID to select the caption language to extract. If input
5020	// is DVB-Sub and output is Burn-in or SMPTE-TT, complete this field and/or
5021	// PID to select the caption language to extract. If input is DVB-Sub that is
5022	// being passed through, omit this field (and PID field); there is no way to
5023	// extract a specific language with pass-through captions.
5024	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
5025
5026	// If your input captions are SCC, TTML, STL, SMI, SRT, or IMSC in an xml file,
5027	// specify the URI of the input captions source file. If your input captions
	// are IMSC in an IMF package, use TrackSourceSettings instead of FileSourceSettings.
5029	SourceSettings *CaptionSourceSettings `locationName:"sourceSettings" type:"structure"`
5030}
5031
5032// String returns the string representation
5033func (s CaptionSelector) String() string {
5034	return awsutil.Prettify(s)
5035}
5036
5037// GoString returns the string representation
5038func (s CaptionSelector) GoString() string {
5039	return s.String()
5040}
5041
5042// Validate inspects the fields of the type to determine if they are valid.
5043func (s *CaptionSelector) Validate() error {
5044	invalidParams := request.ErrInvalidParams{Context: "CaptionSelector"}
5045	if s.CustomLanguageCode != nil && len(*s.CustomLanguageCode) < 3 {
5046		invalidParams.Add(request.NewErrParamMinLen("CustomLanguageCode", 3))
5047	}
5048	if s.SourceSettings != nil {
5049		if err := s.SourceSettings.Validate(); err != nil {
5050			invalidParams.AddNested("SourceSettings", err.(request.ErrInvalidParams))
5051		}
5052	}
5053
5054	if invalidParams.Len() > 0 {
5055		return invalidParams
5056	}
5057	return nil
5058}
5059
5060// SetCustomLanguageCode sets the CustomLanguageCode field's value.
5061func (s *CaptionSelector) SetCustomLanguageCode(v string) *CaptionSelector {
5062	s.CustomLanguageCode = &v
5063	return s
5064}
5065
5066// SetLanguageCode sets the LanguageCode field's value.
5067func (s *CaptionSelector) SetLanguageCode(v string) *CaptionSelector {
5068	s.LanguageCode = &v
5069	return s
5070}
5071
5072// SetSourceSettings sets the SourceSettings field's value.
5073func (s *CaptionSelector) SetSourceSettings(v *CaptionSourceSettings) *CaptionSelector {
5074	s.SourceSettings = v
5075	return s
5076}
5077
5078// Ignore this setting unless your input captions format is SCC. To have the
5079// service compensate for differing frame rates between your input captions
5080// and input video, specify the frame rate of the captions file. Specify this
5081// value as a fraction, using the settings Framerate numerator (framerateNumerator)
5082// and Framerate denominator (framerateDenominator). For example, you might
5083// specify 24 / 1 for 24 fps, 25 / 1 for 25 fps, 24000 / 1001 for 23.976 fps,
5084// or 30000 / 1001 for 29.97 fps.
5085type CaptionSourceFramerate struct {
5086	_ struct{} `type:"structure"`
5087
5088	// Specify the denominator of the fraction that represents the frame rate for
5089	// the setting Caption source frame rate (CaptionSourceFramerate). Use this
5090	// setting along with the setting Framerate numerator (framerateNumerator).
5091	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
5092
5093	// Specify the numerator of the fraction that represents the frame rate for
5094	// the setting Caption source frame rate (CaptionSourceFramerate). Use this
5095	// setting along with the setting Framerate denominator (framerateDenominator).
5096	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
5097}
5098
5099// String returns the string representation
5100func (s CaptionSourceFramerate) String() string {
5101	return awsutil.Prettify(s)
5102}
5103
5104// GoString returns the string representation
5105func (s CaptionSourceFramerate) GoString() string {
5106	return s.String()
5107}
5108
5109// Validate inspects the fields of the type to determine if they are valid.
5110func (s *CaptionSourceFramerate) Validate() error {
5111	invalidParams := request.ErrInvalidParams{Context: "CaptionSourceFramerate"}
5112	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
5113		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
5114	}
5115	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
5116		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
5117	}
5118
5119	if invalidParams.Len() > 0 {
5120		return invalidParams
5121	}
5122	return nil
5123}
5124
5125// SetFramerateDenominator sets the FramerateDenominator field's value.
5126func (s *CaptionSourceFramerate) SetFramerateDenominator(v int64) *CaptionSourceFramerate {
5127	s.FramerateDenominator = &v
5128	return s
5129}
5130
5131// SetFramerateNumerator sets the FramerateNumerator field's value.
5132func (s *CaptionSourceFramerate) SetFramerateNumerator(v int64) *CaptionSourceFramerate {
5133	s.FramerateNumerator = &v
5134	return s
5135}
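
// Example (illustrative sketch): describing a 23.976 fps SCC captions source
// as the fraction 24000 / 1001, per the documentation above.
//
//    captionFramerate := &CaptionSourceFramerate{}
//    captionFramerate.SetFramerateNumerator(24000)
//    captionFramerate.SetFramerateDenominator(1001)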
5136
5137// If your input captions are SCC, TTML, STL, SMI, SRT, or IMSC in an xml file,
5138// specify the URI of the input captions source file. If your input captions
// are IMSC in an IMF package, use TrackSourceSettings instead of FileSourceSettings.
5140type CaptionSourceSettings struct {
5141	_ struct{} `type:"structure"`
5142
5143	// Settings for ancillary captions source.
5144	AncillarySourceSettings *AncillarySourceSettings `locationName:"ancillarySourceSettings" type:"structure"`
5145
5146	// DVB Sub Source Settings
5147	DvbSubSourceSettings *DvbSubSourceSettings `locationName:"dvbSubSourceSettings" type:"structure"`
5148
5149	// Settings for embedded captions Source
5150	EmbeddedSourceSettings *EmbeddedSourceSettings `locationName:"embeddedSourceSettings" type:"structure"`
5151
5152	// If your input captions are SCC, SMI, SRT, STL, TTML, or IMSC 1.1 in an xml
5153	// file, specify the URI of the input caption source file. If your caption source
	// is IMSC in an IMF package, use TrackSourceSettings instead of FileSourceSettings.
5155	FileSourceSettings *FileSourceSettings `locationName:"fileSourceSettings" type:"structure"`
5156
5157	// Use Source (SourceType) to identify the format of your input captions. The
5158	// service cannot auto-detect caption format.
5159	SourceType *string `locationName:"sourceType" type:"string" enum:"CaptionSourceType"`
5160
5161	// Settings specific to Teletext caption sources, including Page number.
5162	TeletextSourceSettings *TeletextSourceSettings `locationName:"teletextSourceSettings" type:"structure"`
5163
5164	// Settings specific to caption sources that are specified by track number.
5165	// Currently, this is only IMSC captions in an IMF package. If your caption
5166	// source is IMSC 1.1 in a separate xml file, use FileSourceSettings instead
5167	// of TrackSourceSettings.
5168	TrackSourceSettings *TrackSourceSettings `locationName:"trackSourceSettings" type:"structure"`
5169}
5170
5171// String returns the string representation
5172func (s CaptionSourceSettings) String() string {
5173	return awsutil.Prettify(s)
5174}
5175
5176// GoString returns the string representation
5177func (s CaptionSourceSettings) GoString() string {
5178	return s.String()
5179}
5180
5181// Validate inspects the fields of the type to determine if they are valid.
5182func (s *CaptionSourceSettings) Validate() error {
5183	invalidParams := request.ErrInvalidParams{Context: "CaptionSourceSettings"}
5184	if s.AncillarySourceSettings != nil {
5185		if err := s.AncillarySourceSettings.Validate(); err != nil {
5186			invalidParams.AddNested("AncillarySourceSettings", err.(request.ErrInvalidParams))
5187		}
5188	}
5189	if s.DvbSubSourceSettings != nil {
5190		if err := s.DvbSubSourceSettings.Validate(); err != nil {
5191			invalidParams.AddNested("DvbSubSourceSettings", err.(request.ErrInvalidParams))
5192		}
5193	}
5194	if s.EmbeddedSourceSettings != nil {
5195		if err := s.EmbeddedSourceSettings.Validate(); err != nil {
5196			invalidParams.AddNested("EmbeddedSourceSettings", err.(request.ErrInvalidParams))
5197		}
5198	}
5199	if s.FileSourceSettings != nil {
5200		if err := s.FileSourceSettings.Validate(); err != nil {
5201			invalidParams.AddNested("FileSourceSettings", err.(request.ErrInvalidParams))
5202		}
5203	}
5204	if s.TeletextSourceSettings != nil {
5205		if err := s.TeletextSourceSettings.Validate(); err != nil {
5206			invalidParams.AddNested("TeletextSourceSettings", err.(request.ErrInvalidParams))
5207		}
5208	}
5209	if s.TrackSourceSettings != nil {
5210		if err := s.TrackSourceSettings.Validate(); err != nil {
5211			invalidParams.AddNested("TrackSourceSettings", err.(request.ErrInvalidParams))
5212		}
5213	}
5214
5215	if invalidParams.Len() > 0 {
5216		return invalidParams
5217	}
5218	return nil
5219}
5220
5221// SetAncillarySourceSettings sets the AncillarySourceSettings field's value.
5222func (s *CaptionSourceSettings) SetAncillarySourceSettings(v *AncillarySourceSettings) *CaptionSourceSettings {
5223	s.AncillarySourceSettings = v
5224	return s
5225}
5226
5227// SetDvbSubSourceSettings sets the DvbSubSourceSettings field's value.
5228func (s *CaptionSourceSettings) SetDvbSubSourceSettings(v *DvbSubSourceSettings) *CaptionSourceSettings {
5229	s.DvbSubSourceSettings = v
5230	return s
5231}
5232
5233// SetEmbeddedSourceSettings sets the EmbeddedSourceSettings field's value.
5234func (s *CaptionSourceSettings) SetEmbeddedSourceSettings(v *EmbeddedSourceSettings) *CaptionSourceSettings {
5235	s.EmbeddedSourceSettings = v
5236	return s
5237}
5238
5239// SetFileSourceSettings sets the FileSourceSettings field's value.
5240func (s *CaptionSourceSettings) SetFileSourceSettings(v *FileSourceSettings) *CaptionSourceSettings {
5241	s.FileSourceSettings = v
5242	return s
5243}
5244
5245// SetSourceType sets the SourceType field's value.
5246func (s *CaptionSourceSettings) SetSourceType(v string) *CaptionSourceSettings {
5247	s.SourceType = &v
5248	return s
5249}
5250
5251// SetTeletextSourceSettings sets the TeletextSourceSettings field's value.
5252func (s *CaptionSourceSettings) SetTeletextSourceSettings(v *TeletextSourceSettings) *CaptionSourceSettings {
5253	s.TeletextSourceSettings = v
5254	return s
5255}
5256
5257// SetTrackSourceSettings sets the TrackSourceSettings field's value.
5258func (s *CaptionSourceSettings) SetTrackSourceSettings(v *TrackSourceSettings) *CaptionSourceSettings {
5259	s.TrackSourceSettings = v
5260	return s
5261}
5262
5263// Channel mapping (ChannelMapping) contains the group of fields that hold the
5264// remixing value for each channel, in dB. Specify remix values to indicate
5265// how much of the content from your input audio channel you want in your output
5266// audio channels. Each instance of the InputChannels or InputChannelsFineTune
5267// array specifies these values for one output channel. Use one instance of
5268// this array for each output channel. In the console, each array corresponds
5269// to a column in the graphical depiction of the mapping matrix. The rows of
5270// the graphical matrix correspond to input channels. Valid values are within
5271// the range from -60 (mute) through 6. A setting of 0 passes the input channel
5272// unchanged to the output channel (no attenuation or amplification). Use InputChannels
5273// or InputChannelsFineTune to specify your remix values. Don't use both.
5274type ChannelMapping struct {
5275	_ struct{} `type:"structure"`
5276
5277	// In your JSON job specification, include one child of OutputChannels for each
5278	// audio channel that you want in your output. Each child should contain one
5279	// instance of InputChannels or InputChannelsFineTune.
5280	OutputChannels []*OutputChannelMapping `locationName:"outputChannels" type:"list"`
5281}
5282
5283// String returns the string representation
5284func (s ChannelMapping) String() string {
5285	return awsutil.Prettify(s)
5286}
5287
5288// GoString returns the string representation
5289func (s ChannelMapping) GoString() string {
5290	return s.String()
5291}
5292
5293// SetOutputChannels sets the OutputChannels field's value.
5294func (s *ChannelMapping) SetOutputChannels(v []*OutputChannelMapping) *ChannelMapping {
5295	s.OutputChannels = v
5296	return s
5297}
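
// Example (illustrative sketch): a stereo pass-through remix using the values
// described above, where 0 dB passes a channel unchanged and -60 mutes it. Use
// one OutputChannelMapping per output channel; each InputChannels entry is the
// gain applied to the corresponding input channel.
//
//    left := &OutputChannelMapping{InputChannels: []*int64{aws.Int64(0), aws.Int64(-60)}}
//    right := &OutputChannelMapping{InputChannels: []*int64{aws.Int64(-60), aws.Int64(0)}}
//
//    mapping := &ChannelMapping{}
//    mapping.SetOutputChannels([]*OutputChannelMapping{left, right})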
5298
5299// Specify the details for each pair of HLS and DASH additional manifests that
5300// you want the service to generate for this CMAF output group. Each pair of
5301// manifests can reference a different subset of outputs in the group.
5302type CmafAdditionalManifest struct {
5303	_ struct{} `type:"structure"`
5304
5305	// Specify a name modifier that the service adds to the name of this manifest
5306	// to make it different from the file names of the other main manifests in the
5307	// output group. For example, say that the default main manifest for your HLS
5308	// group is film-name.m3u8. If you enter "-no-premium" for this setting, then
5309	// the file name the service generates for this top-level manifest is film-name-no-premium.m3u8.
5310	// For HLS output groups, specify a manifestNameModifier that is different from
5311	// the nameModifier of the output. The service uses the output name modifier
5312	// to create unique names for the individual variant manifests.
5313	ManifestNameModifier *string `locationName:"manifestNameModifier" min:"1" type:"string"`
5314
5315	// Specify the outputs that you want this additional top-level manifest to reference.
5316	SelectedOutputs []*string `locationName:"selectedOutputs" type:"list"`
5317}
5318
5319// String returns the string representation
5320func (s CmafAdditionalManifest) String() string {
5321	return awsutil.Prettify(s)
5322}
5323
5324// GoString returns the string representation
5325func (s CmafAdditionalManifest) GoString() string {
5326	return s.String()
5327}
5328
5329// Validate inspects the fields of the type to determine if they are valid.
5330func (s *CmafAdditionalManifest) Validate() error {
5331	invalidParams := request.ErrInvalidParams{Context: "CmafAdditionalManifest"}
5332	if s.ManifestNameModifier != nil && len(*s.ManifestNameModifier) < 1 {
5333		invalidParams.Add(request.NewErrParamMinLen("ManifestNameModifier", 1))
5334	}
5335
5336	if invalidParams.Len() > 0 {
5337		return invalidParams
5338	}
5339	return nil
5340}
5341
5342// SetManifestNameModifier sets the ManifestNameModifier field's value.
5343func (s *CmafAdditionalManifest) SetManifestNameModifier(v string) *CmafAdditionalManifest {
5344	s.ManifestNameModifier = &v
5345	return s
5346}
5347
5348// SetSelectedOutputs sets the SelectedOutputs field's value.
5349func (s *CmafAdditionalManifest) SetSelectedOutputs(v []*string) *CmafAdditionalManifest {
5350	s.SelectedOutputs = v
5351	return s
5352}
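
// Example (illustrative sketch): an additional top-level manifest that
// references only a subset of outputs. The "-no-premium" modifier comes from
// the field documentation above; the output names are placeholders.
//
//    extra := &CmafAdditionalManifest{}
//    extra.SetManifestNameModifier("-no-premium")
//    extra.SetSelectedOutputs([]*string{aws.String("output-1"), aws.String("output-2")})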
5353
5354// Settings for CMAF encryption
5355type CmafEncryptionSettings struct {
5356	_ struct{} `type:"structure"`
5357
5358	// This is a 128-bit, 16-byte hex value represented by a 32-character text string.
5359	// If this parameter is not set then the Initialization Vector will follow the
5360	// segment number by default.
5361	ConstantInitializationVector *string `locationName:"constantInitializationVector" min:"32" type:"string"`
5362
5363	// Specify the encryption scheme that you want the service to use when encrypting
5364	// your CMAF segments. Choose AES-CBC subsample (SAMPLE-AES) or AES_CTR (AES-CTR).
5365	EncryptionMethod *string `locationName:"encryptionMethod" type:"string" enum:"CmafEncryptionType"`
5366
5367	// When you use DRM with CMAF outputs, choose whether the service writes the
5368	// 128-bit encryption initialization vector in the HLS and DASH manifests.
5369	InitializationVectorInManifest *string `locationName:"initializationVectorInManifest" type:"string" enum:"CmafInitializationVectorInManifest"`
5370
5371	// If your output group type is CMAF, use these settings when doing DRM encryption
5372	// with a SPEKE-compliant key provider. If your output group type is HLS, DASH,
5373	// or Microsoft Smooth, use the SpekeKeyProvider settings instead.
5374	SpekeKeyProvider *SpekeKeyProviderCmaf `locationName:"spekeKeyProvider" type:"structure"`
5375
5376	// Use these settings to set up encryption with a static key provider.
5377	StaticKeyProvider *StaticKeyProvider `locationName:"staticKeyProvider" type:"structure"`
5378
5379	// Specify whether your DRM encryption key is static or from a key provider
5380	// that follows the SPEKE standard. For more information about SPEKE, see https://docs.aws.amazon.com/speke/latest/documentation/what-is-speke.html.
5381	Type *string `locationName:"type" type:"string" enum:"CmafKeyProviderType"`
5382}
5383
5384// String returns the string representation
5385func (s CmafEncryptionSettings) String() string {
5386	return awsutil.Prettify(s)
5387}
5388
5389// GoString returns the string representation
5390func (s CmafEncryptionSettings) GoString() string {
5391	return s.String()
5392}
5393
5394// Validate inspects the fields of the type to determine if they are valid.
5395func (s *CmafEncryptionSettings) Validate() error {
5396	invalidParams := request.ErrInvalidParams{Context: "CmafEncryptionSettings"}
5397	if s.ConstantInitializationVector != nil && len(*s.ConstantInitializationVector) < 32 {
5398		invalidParams.Add(request.NewErrParamMinLen("ConstantInitializationVector", 32))
5399	}
5400
5401	if invalidParams.Len() > 0 {
5402		return invalidParams
5403	}
5404	return nil
5405}
5406
5407// SetConstantInitializationVector sets the ConstantInitializationVector field's value.
5408func (s *CmafEncryptionSettings) SetConstantInitializationVector(v string) *CmafEncryptionSettings {
5409	s.ConstantInitializationVector = &v
5410	return s
5411}
5412
5413// SetEncryptionMethod sets the EncryptionMethod field's value.
5414func (s *CmafEncryptionSettings) SetEncryptionMethod(v string) *CmafEncryptionSettings {
5415	s.EncryptionMethod = &v
5416	return s
5417}
5418
5419// SetInitializationVectorInManifest sets the InitializationVectorInManifest field's value.
5420func (s *CmafEncryptionSettings) SetInitializationVectorInManifest(v string) *CmafEncryptionSettings {
5421	s.InitializationVectorInManifest = &v
5422	return s
5423}
5424
5425// SetSpekeKeyProvider sets the SpekeKeyProvider field's value.
5426func (s *CmafEncryptionSettings) SetSpekeKeyProvider(v *SpekeKeyProviderCmaf) *CmafEncryptionSettings {
5427	s.SpekeKeyProvider = v
5428	return s
5429}
5430
5431// SetStaticKeyProvider sets the StaticKeyProvider field's value.
5432func (s *CmafEncryptionSettings) SetStaticKeyProvider(v *StaticKeyProvider) *CmafEncryptionSettings {
5433	s.StaticKeyProvider = v
5434	return s
5435}
5436
5437// SetType sets the Type field's value.
5438func (s *CmafEncryptionSettings) SetType(v string) *CmafEncryptionSettings {
5439	s.Type = &v
5440	return s
5441}
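
// Example (illustrative sketch, not generated documentation): one way to
// assemble CMAF encryption settings with the setters above. The initialization
// vector is a placeholder, and the enum strings shown are assumptions; confirm
// the exact spellings against the CmafEncryptionType and CmafKeyProviderType
// values before use.
//
//    encryption := (&CmafEncryptionSettings{}).
//        SetConstantInitializationVector("0123456789abcdef0123456789abcdef").
//        SetEncryptionMethod("SAMPLE_AES").
//        SetType("STATIC_KEY")
//    // Attach key details with SetStaticKeyProvider or SetSpekeKeyProvider
//    // before referencing these settings from CmafGroupSettings.
//
//    if err := encryption.Validate(); err != nil {
//        fmt.Println(err) // reports an initialization vector shorter than 32 characters
//    }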
5442
5443// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
5444// CMAF_GROUP_SETTINGS. Each output in a CMAF Output Group may only contain
5445// a single video, audio, or caption output.
5446type CmafGroupSettings struct {
5447	_ struct{} `type:"structure"`
5448
5449	// By default, the service creates one top-level .m3u8 HLS manifest and one
5450	// top-level .mpd DASH manifest for each CMAF output group in your job. These
5451	// default manifests reference every output in the output group. To create additional
5452	// top-level manifests that reference a subset of the outputs in the output
5453	// group, specify a list of them here. For each additional manifest that you
5454	// specify, the service creates one HLS manifest and one DASH manifest.
5455	AdditionalManifests []*CmafAdditionalManifest `locationName:"additionalManifests" type:"list"`
5456
5457	// A partial URI prefix that will be put in the manifest file at the top level
5458	// BaseURL element. Can be used if streams are delivered from a different URL
5459	// than the manifest file.
5460	BaseUrl *string `locationName:"baseUrl" type:"string"`
5461
5462	// Disable this setting only when your workflow requires the #EXT-X-ALLOW-CACHE:no
5463	// tag. Otherwise, keep the default value Enabled (ENABLED) and control caching
5464	// in your video distribution setup. For example, use the Cache-Control HTTP
5465	// header.
5466	ClientCache *string `locationName:"clientCache" type:"string" enum:"CmafClientCache"`
5467
5468	// Specification to use (RFC-6381 or the default RFC-4281) during m3u8 playlist
5469	// generation.
5470	CodecSpecification *string `locationName:"codecSpecification" type:"string" enum:"CmafCodecSpecification"`
5471
5472	// Use Destination (Destination) to specify the S3 output location and the output
5473	// filename base. Destination accepts format identifiers. If you do not specify
5474	// the base filename in the URI, the service will use the filename of the input
5475	// file. If your job has multiple inputs, the service uses the filename of the
5476	// first input file.
5477	Destination *string `locationName:"destination" type:"string"`
5478
5479	// Settings associated with the destination. Will vary based on the type of
5480	// destination
5481	DestinationSettings *DestinationSettings `locationName:"destinationSettings" type:"structure"`
5482
5483	// DRM settings.
5484	Encryption *CmafEncryptionSettings `locationName:"encryption" type:"structure"`
5485
5486	// Length of fragments to generate (in seconds). Fragment length must be compatible
5487	// with GOP size and Framerate. Note that fragments will end on the next keyframe
5488	// after this number of seconds, so actual fragment length may be longer. When
5489	// Emit Single File is checked, the fragmentation is internal to a single output
5490	// file and it does not cause the creation of many output files as in other
5491	// output types.
5492	FragmentLength *int64 `locationName:"fragmentLength" min:"1" type:"integer"`
5493
5494	// When set to GZIP, compresses HLS playlist.
5495	ManifestCompression *string `locationName:"manifestCompression" type:"string" enum:"CmafManifestCompression"`
5496
5497	// Indicates whether the output manifest should use floating point values for
5498	// segment duration.
5499	ManifestDurationFormat *string `locationName:"manifestDurationFormat" type:"string" enum:"CmafManifestDurationFormat"`
5500
5501	// Minimum time of initially buffered media that is needed to ensure smooth
5502	// playout.
5503	MinBufferTime *int64 `locationName:"minBufferTime" type:"integer"`
5504
5505	// Keep this setting at the default value of 0, unless you are troubleshooting
5506	// a problem with how devices play back the end of your video asset. If you
5507	// know that player devices are hanging on the final segment of your video because
5508	// the length of your final segment is too short, use this setting to specify
5509	// a minimum final segment length, in seconds. Choose a value that is greater
5510	// than or equal to 1 and less than your segment length. When you specify a
5511	// value for this setting, the encoder will combine any final segment that is
5512	// shorter than the length that you specify with the previous segment. For example,
5513	// your segment length is 3 seconds and your final segment is .5 seconds without
5514	// a minimum final segment length; when you set the minimum final segment length
5515	// to 1, your final segment is 3.5 seconds.
5516	MinFinalSegmentLength *float64 `locationName:"minFinalSegmentLength" type:"double"`
5517
5518	// Specify whether your DASH profile is on-demand or main. When you choose Main
5519	// profile (MAIN_PROFILE), the service signals urn:mpeg:dash:profile:isoff-main:2011
5520	// in your .mpd DASH manifest. When you choose On-demand (ON_DEMAND_PROFILE),
5521	// the service signals urn:mpeg:dash:profile:isoff-on-demand:2011 in your .mpd.
5522	// When you choose On-demand, you must also set the output group setting Segment
5523	// control (SegmentControl) to Single file (SINGLE_FILE).
5524	MpdProfile *string `locationName:"mpdProfile" type:"string" enum:"CmafMpdProfile"`
5525
5526	// When set to SINGLE_FILE, a single output file is generated, which is internally
5527	// segmented using the Fragment Length and Segment Length. When set to SEGMENTED_FILES,
5528	// separate segment files will be created.
5529	SegmentControl *string `locationName:"segmentControl" type:"string" enum:"CmafSegmentControl"`
5530
5531	// Use this setting to specify the length, in seconds, of each individual CMAF
5532	// segment. This value applies to the whole package; that is, to every output
5533	// in the output group. Note that segments end on the first keyframe after this
5534	// number of seconds, so the actual segment length might be slightly longer.
5535	// If you set Segment control (CmafSegmentControl) to single file, the service
5536	// puts the content of each output in a single file that has metadata that marks
5537	// these segments. If you set it to segmented files, the service creates multiple
5538	// files for each output, each with the content of one segment.
5539	SegmentLength *int64 `locationName:"segmentLength" min:"1" type:"integer"`
5540
5541	// Include or exclude RESOLUTION attribute for video in EXT-X-STREAM-INF tag
5542	// of variant manifest.
5543	StreamInfResolution *string `locationName:"streamInfResolution" type:"string" enum:"CmafStreamInfResolution"`
5544
5545	// When set to ENABLED, a DASH MPD manifest will be generated for this output.
5546	WriteDashManifest *string `locationName:"writeDashManifest" type:"string" enum:"CmafWriteDASHManifest"`
5547
5548	// When set to ENABLED, an Apple HLS manifest will be generated for this output.
5549	WriteHlsManifest *string `locationName:"writeHlsManifest" type:"string" enum:"CmafWriteHLSManifest"`
5550
5551	// When you enable Precise segment duration in DASH manifests (writeSegmentTimelineInRepresentation),
5552	// your DASH manifest shows precise segment durations. The segment duration
5553	// information appears inside the SegmentTimeline element, inside SegmentTemplate
5554	// at the Representation level. When this feature isn't enabled, the segment
5555	// durations in your DASH manifest are approximate. The segment duration information
5556	// appears in the duration attribute of the SegmentTemplate element.
5557	WriteSegmentTimelineInRepresentation *string `locationName:"writeSegmentTimelineInRepresentation" type:"string" enum:"CmafWriteSegmentTimelineInRepresentation"`
5558}
5559
5560// String returns the string representation
5561func (s CmafGroupSettings) String() string {
5562	return awsutil.Prettify(s)
5563}
5564
5565// GoString returns the string representation
5566func (s CmafGroupSettings) GoString() string {
5567	return s.String()
5568}
5569
5570// Validate inspects the fields of the type to determine if they are valid.
5571func (s *CmafGroupSettings) Validate() error {
5572	invalidParams := request.ErrInvalidParams{Context: "CmafGroupSettings"}
5573	if s.FragmentLength != nil && *s.FragmentLength < 1 {
5574		invalidParams.Add(request.NewErrParamMinValue("FragmentLength", 1))
5575	}
5576	if s.SegmentLength != nil && *s.SegmentLength < 1 {
5577		invalidParams.Add(request.NewErrParamMinValue("SegmentLength", 1))
5578	}
5579	if s.AdditionalManifests != nil {
5580		for i, v := range s.AdditionalManifests {
5581			if v == nil {
5582				continue
5583			}
5584			if err := v.Validate(); err != nil {
5585				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AdditionalManifests", i), err.(request.ErrInvalidParams))
5586			}
5587		}
5588	}
5589	if s.Encryption != nil {
5590		if err := s.Encryption.Validate(); err != nil {
5591			invalidParams.AddNested("Encryption", err.(request.ErrInvalidParams))
5592		}
5593	}
5594
5595	if invalidParams.Len() > 0 {
5596		return invalidParams
5597	}
5598	return nil
5599}
5600
5601// SetAdditionalManifests sets the AdditionalManifests field's value.
5602func (s *CmafGroupSettings) SetAdditionalManifests(v []*CmafAdditionalManifest) *CmafGroupSettings {
5603	s.AdditionalManifests = v
5604	return s
5605}
5606
5607// SetBaseUrl sets the BaseUrl field's value.
5608func (s *CmafGroupSettings) SetBaseUrl(v string) *CmafGroupSettings {
5609	s.BaseUrl = &v
5610	return s
5611}
5612
5613// SetClientCache sets the ClientCache field's value.
5614func (s *CmafGroupSettings) SetClientCache(v string) *CmafGroupSettings {
5615	s.ClientCache = &v
5616	return s
5617}
5618
5619// SetCodecSpecification sets the CodecSpecification field's value.
5620func (s *CmafGroupSettings) SetCodecSpecification(v string) *CmafGroupSettings {
5621	s.CodecSpecification = &v
5622	return s
5623}
5624
5625// SetDestination sets the Destination field's value.
5626func (s *CmafGroupSettings) SetDestination(v string) *CmafGroupSettings {
5627	s.Destination = &v
5628	return s
5629}
5630
5631// SetDestinationSettings sets the DestinationSettings field's value.
5632func (s *CmafGroupSettings) SetDestinationSettings(v *DestinationSettings) *CmafGroupSettings {
5633	s.DestinationSettings = v
5634	return s
5635}
5636
5637// SetEncryption sets the Encryption field's value.
5638func (s *CmafGroupSettings) SetEncryption(v *CmafEncryptionSettings) *CmafGroupSettings {
5639	s.Encryption = v
5640	return s
5641}
5642
5643// SetFragmentLength sets the FragmentLength field's value.
5644func (s *CmafGroupSettings) SetFragmentLength(v int64) *CmafGroupSettings {
5645	s.FragmentLength = &v
5646	return s
5647}
5648
5649// SetManifestCompression sets the ManifestCompression field's value.
5650func (s *CmafGroupSettings) SetManifestCompression(v string) *CmafGroupSettings {
5651	s.ManifestCompression = &v
5652	return s
5653}
5654
5655// SetManifestDurationFormat sets the ManifestDurationFormat field's value.
5656func (s *CmafGroupSettings) SetManifestDurationFormat(v string) *CmafGroupSettings {
5657	s.ManifestDurationFormat = &v
5658	return s
5659}
5660
5661// SetMinBufferTime sets the MinBufferTime field's value.
5662func (s *CmafGroupSettings) SetMinBufferTime(v int64) *CmafGroupSettings {
5663	s.MinBufferTime = &v
5664	return s
5665}
5666
5667// SetMinFinalSegmentLength sets the MinFinalSegmentLength field's value.
5668func (s *CmafGroupSettings) SetMinFinalSegmentLength(v float64) *CmafGroupSettings {
5669	s.MinFinalSegmentLength = &v
5670	return s
5671}
5672
5673// SetMpdProfile sets the MpdProfile field's value.
5674func (s *CmafGroupSettings) SetMpdProfile(v string) *CmafGroupSettings {
5675	s.MpdProfile = &v
5676	return s
5677}
5678
5679// SetSegmentControl sets the SegmentControl field's value.
5680func (s *CmafGroupSettings) SetSegmentControl(v string) *CmafGroupSettings {
5681	s.SegmentControl = &v
5682	return s
5683}
5684
5685// SetSegmentLength sets the SegmentLength field's value.
5686func (s *CmafGroupSettings) SetSegmentLength(v int64) *CmafGroupSettings {
5687	s.SegmentLength = &v
5688	return s
5689}
5690
5691// SetStreamInfResolution sets the StreamInfResolution field's value.
5692func (s *CmafGroupSettings) SetStreamInfResolution(v string) *CmafGroupSettings {
5693	s.StreamInfResolution = &v
5694	return s
5695}
5696
5697// SetWriteDashManifest sets the WriteDashManifest field's value.
5698func (s *CmafGroupSettings) SetWriteDashManifest(v string) *CmafGroupSettings {
5699	s.WriteDashManifest = &v
5700	return s
5701}
5702
5703// SetWriteHlsManifest sets the WriteHlsManifest field's value.
5704func (s *CmafGroupSettings) SetWriteHlsManifest(v string) *CmafGroupSettings {
5705	s.WriteHlsManifest = &v
5706	return s
5707}
5708
5709// SetWriteSegmentTimelineInRepresentation sets the WriteSegmentTimelineInRepresentation field's value.
5710func (s *CmafGroupSettings) SetWriteSegmentTimelineInRepresentation(v string) *CmafGroupSettings {
5711	s.WriteSegmentTimelineInRepresentation = &v
5712	return s
5713}
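
// Example (illustrative sketch): configuring a CMAF output group with the
// setters above. The destination URI is a placeholder; the enum strings shown
// (SEGMENTED_FILES, ENABLED) come from the field documentation above.
//
//    cmafGroup := (&CmafGroupSettings{}).
//        SetDestination("s3://DOC-EXAMPLE-BUCKET/cmaf/").
//        SetFragmentLength(2).
//        SetSegmentLength(6).
//        SetSegmentControl("SEGMENTED_FILES").
//        SetWriteHlsManifest("ENABLED").
//        SetWriteDashManifest("ENABLED")
//
//    if err := cmafGroup.Validate(); err != nil {
//        fmt.Println(err) // flags FragmentLength or SegmentLength values below 1
//    }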
5714
5715// Settings for MP4 segments in CMAF
5716type CmfcSettings struct {
5717	_ struct{} `type:"structure"`
5718
5719	// Specify this setting only when your output will be consumed by a downstream
5720	// repackaging workflow that is sensitive to very small duration differences
5721	// between video and audio. For this situation, choose Match video duration
5722	// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
5723	// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
5724	// MediaConvert pads the output audio streams with silence or trims them to
5725	// ensure that the total duration of each audio stream is at least as long as
5726	// the total duration of the video stream. After padding or trimming, the audio
5727	// stream duration is no more than one frame longer than the video stream. MediaConvert
5728	// applies audio padding or trimming only to the end of the last segment of
5729	// the output. For unsegmented outputs, MediaConvert adds padding only to the
5730	// end of the file. When you keep the default value, any minor discrepancies
5731	// between audio and video duration will depend on your output audio codec.
5732	AudioDuration *string `locationName:"audioDuration" type:"string" enum:"CmfcAudioDuration"`
5733
5734	// Choose Include (INCLUDE) to have MediaConvert generate an HLS child manifest
5735	// that lists only the I-frames for this rendition, in addition to your regular
5736	// manifest for this rendition. You might use this manifest as part of a workflow
5737	// that creates preview functions for your video. MediaConvert adds both the
5738	// I-frame only child manifest and the regular child manifest to the parent
5739	// manifest. When you don't need the I-frame only child manifest, keep the default
5740	// value Exclude (EXCLUDE).
5741	IFrameOnlyManifest *string `locationName:"iFrameOnlyManifest" type:"string" enum:"CmfcIFrameOnlyManifest"`
5742
5743	// Use this setting only when you specify SCTE-35 markers from ESAM. Choose
5744	// INSERT to put SCTE-35 markers in this output at the insertion points that
5745	// you specify in an ESAM XML document. Provide the document in the setting
5746	// SCC XML (sccXml).
5747	Scte35Esam *string `locationName:"scte35Esam" type:"string" enum:"CmfcScte35Esam"`
5748
5749	// Ignore this setting unless you have SCTE-35 markers in your input video file.
5750	// Choose Passthrough (PASSTHROUGH) if you want SCTE-35 markers that appear
5751	// in your input to also appear in this output. Choose None (NONE) if you don't
5752	// want those SCTE-35 markers in this output.
5753	Scte35Source *string `locationName:"scte35Source" type:"string" enum:"CmfcScte35Source"`
5754}
5755
5756// String returns the string representation
5757func (s CmfcSettings) String() string {
5758	return awsutil.Prettify(s)
5759}
5760
5761// GoString returns the string representation
5762func (s CmfcSettings) GoString() string {
5763	return s.String()
5764}
5765
5766// SetAudioDuration sets the AudioDuration field's value.
5767func (s *CmfcSettings) SetAudioDuration(v string) *CmfcSettings {
5768	s.AudioDuration = &v
5769	return s
5770}
5771
5772// SetIFrameOnlyManifest sets the IFrameOnlyManifest field's value.
5773func (s *CmfcSettings) SetIFrameOnlyManifest(v string) *CmfcSettings {
5774	s.IFrameOnlyManifest = &v
5775	return s
5776}
5777
5778// SetScte35Esam sets the Scte35Esam field's value.
5779func (s *CmfcSettings) SetScte35Esam(v string) *CmfcSettings {
5780	s.Scte35Esam = &v
5781	return s
5782}
5783
5784// SetScte35Source sets the Scte35Source field's value.
5785func (s *CmfcSettings) SetScte35Source(v string) *CmfcSettings {
5786	s.Scte35Source = &v
5787	return s
5788}
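
// Example (illustrative sketch): CMFC container settings for an output that
// matches audio duration to video and passes through SCTE-35 markers. The
// enum strings come from the field documentation above; attach the result via
// ContainerSettings.SetCmfcSettings.
//
//    cmfc := (&CmfcSettings{}).
//        SetAudioDuration("MATCH_VIDEO_DURATION").
//        SetIFrameOnlyManifest("EXCLUDE").
//        SetScte35Source("PASSTHROUGH")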
5789
5790// Settings for color correction.
5791type ColorCorrector struct {
5792	_ struct{} `type:"structure"`
5793
5794	// Brightness level.
5795	Brightness *int64 `locationName:"brightness" min:"1" type:"integer"`
5796
5797	// Specify the color space you want for this output. The service supports conversion
5798	// between HDR formats, between SDR formats, from SDR to HDR, and from HDR to
5799	// SDR. SDR to HDR conversion doesn't upgrade the dynamic range. The converted
5800	// video has an HDR format, but visually appears the same as an unconverted
5801	// output. HDR to SDR conversion uses Elemental tone mapping technology to approximate
5802	// the outcome of manually regrading from HDR to SDR.
5803	ColorSpaceConversion *string `locationName:"colorSpaceConversion" type:"string" enum:"ColorSpaceConversion"`
5804
5805	// Contrast level.
5806	Contrast *int64 `locationName:"contrast" min:"1" type:"integer"`
5807
5808	// Use these settings when you convert to the HDR 10 color space. Specify the
5809	// SMPTE ST 2086 Mastering Display Color Volume static metadata that you want
5810	// signaled in the output. These values don't affect the pixel values that are
5811	// encoded in the video stream. They are intended to help the downstream video
5812	// player display content in a way that reflects the intentions of the content
5813	// creator. When you set Color space conversion (ColorSpaceConversion) to HDR
5814	// 10 (FORCE_HDR10), these settings are required. You must set values for Max
5815	// frame average light level (maxFrameAverageLightLevel) and Max content light
5816	// level (maxContentLightLevel); these settings don't have a default value.
5817	// The default values for the other HDR 10 metadata settings are defined by
5818	// the P3D65 color space. For more information about MediaConvert HDR jobs,
5819	// see https://docs.aws.amazon.com/console/mediaconvert/hdr.
5820	Hdr10Metadata *Hdr10Metadata `locationName:"hdr10Metadata" type:"structure"`
5821
5822	// Hue in degrees.
5823	Hue *int64 `locationName:"hue" type:"integer"`
5824
5825	// Saturation level.
5826	Saturation *int64 `locationName:"saturation" min:"1" type:"integer"`
5827}
5828
5829// String returns the string representation
5830func (s ColorCorrector) String() string {
5831	return awsutil.Prettify(s)
5832}
5833
5834// GoString returns the string representation
5835func (s ColorCorrector) GoString() string {
5836	return s.String()
5837}
5838
5839// Validate inspects the fields of the type to determine if they are valid.
5840func (s *ColorCorrector) Validate() error {
5841	invalidParams := request.ErrInvalidParams{Context: "ColorCorrector"}
5842	if s.Brightness != nil && *s.Brightness < 1 {
5843		invalidParams.Add(request.NewErrParamMinValue("Brightness", 1))
5844	}
5845	if s.Contrast != nil && *s.Contrast < 1 {
5846		invalidParams.Add(request.NewErrParamMinValue("Contrast", 1))
5847	}
5848	if s.Hue != nil && *s.Hue < -180 {
5849		invalidParams.Add(request.NewErrParamMinValue("Hue", -180))
5850	}
5851	if s.Saturation != nil && *s.Saturation < 1 {
5852		invalidParams.Add(request.NewErrParamMinValue("Saturation", 1))
5853	}
5854
5855	if invalidParams.Len() > 0 {
5856		return invalidParams
5857	}
5858	return nil
5859}
5860
5861// SetBrightness sets the Brightness field's value.
5862func (s *ColorCorrector) SetBrightness(v int64) *ColorCorrector {
5863	s.Brightness = &v
5864	return s
5865}
5866
5867// SetColorSpaceConversion sets the ColorSpaceConversion field's value.
5868func (s *ColorCorrector) SetColorSpaceConversion(v string) *ColorCorrector {
5869	s.ColorSpaceConversion = &v
5870	return s
5871}
5872
5873// SetContrast sets the Contrast field's value.
5874func (s *ColorCorrector) SetContrast(v int64) *ColorCorrector {
5875	s.Contrast = &v
5876	return s
5877}
5878
5879// SetHdr10Metadata sets the Hdr10Metadata field's value.
5880func (s *ColorCorrector) SetHdr10Metadata(v *Hdr10Metadata) *ColorCorrector {
5881	s.Hdr10Metadata = v
5882	return s
5883}
5884
5885// SetHue sets the Hue field's value.
5886func (s *ColorCorrector) SetHue(v int64) *ColorCorrector {
5887	s.Hue = &v
5888	return s
5889}
5890
5891// SetSaturation sets the Saturation field's value.
5892func (s *ColorCorrector) SetSaturation(v int64) *ColorCorrector {
5893	s.Saturation = &v
5894	return s
5895}
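
// Example (illustrative sketch): color correction settings for an HDR 10
// conversion. The numeric levels are placeholder values within the documented
// minimums; FORCE_HDR10 is the conversion value named in the documentation
// above, and an Hdr10Metadata value (not shown) is required with it.
//
//    corrector := (&ColorCorrector{}).
//        SetBrightness(50).
//        SetContrast(50).
//        SetSaturation(50).
//        SetHue(0).
//        SetColorSpaceConversion("FORCE_HDR10")
//    // Also call SetHdr10Metadata(...) when converting to HDR 10.
//
//    if err := corrector.Validate(); err != nil {
//        fmt.Println(err) // reports levels below their documented minimums
//    }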
5896
5897type ConflictException struct {
5898	_            struct{}                  `type:"structure"`
5899	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
5900
5901	Message_ *string `locationName:"message" type:"string"`
5902}
5903
5904// String returns the string representation
5905func (s ConflictException) String() string {
5906	return awsutil.Prettify(s)
5907}
5908
5909// GoString returns the string representation
5910func (s ConflictException) GoString() string {
5911	return s.String()
5912}
5913
5914func newErrorConflictException(v protocol.ResponseMetadata) error {
5915	return &ConflictException{
5916		RespMetadata: v,
5917	}
5918}
5919
5920// Code returns the exception type name.
5921func (s *ConflictException) Code() string {
5922	return "ConflictException"
5923}
5924
5925// Message returns the exception's message.
5926func (s *ConflictException) Message() string {
5927	if s.Message_ != nil {
5928		return *s.Message_
5929	}
5930	return ""
5931}
5932
5933// OrigErr always returns nil, satisfies awserr.Error interface.
5934func (s *ConflictException) OrigErr() error {
5935	return nil
5936}
5937
5938func (s *ConflictException) Error() string {
5939	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
5940}
5941
5942// StatusCode returns the HTTP status code for the request's response error.
5943func (s *ConflictException) StatusCode() int {
5944	return s.RespMetadata.StatusCode
5945}
5946
5947// RequestID returns the service's response RequestID for the request.
5948func (s *ConflictException) RequestID() string {
5949	return s.RespMetadata.RequestID
5950}
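
// Example (illustrative sketch): detecting this error type after an API call.
// The client value "svc" and the input value are placeholders for a previously
// constructed MediaConvert client and request input.
//
//    _, err := svc.CreateQueue(input)
//    if err != nil {
//        if conflict, ok := err.(*ConflictException); ok {
//            fmt.Println("conflict:", conflict.Code(), conflict.Message())
//        }
//    }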
5951
5952// Container specific settings.
5953type ContainerSettings struct {
5954	_ struct{} `type:"structure"`
5955
5956	// Settings for MP4 segments in CMAF
5957	CmfcSettings *CmfcSettings `locationName:"cmfcSettings" type:"structure"`
5958
5959	// Container for this output. Some containers require a container settings object.
5960	// If not specified, the default object will be created.
5961	Container *string `locationName:"container" type:"string" enum:"ContainerType"`
5962
5963	// Settings for F4v container
5964	F4vSettings *F4vSettings `locationName:"f4vSettings" type:"structure"`
5965
5966	// MPEG-2 TS container settings. These apply to outputs in a File output group
5967	// when the output's container (ContainerType) is MPEG-2 Transport Stream (M2TS).
5968	// In these assets, data is organized by the program map table (PMT). Each transport
5969	// stream program contains subsets of data, including audio, video, and metadata.
5970	// Each of these subsets of data has a numerical label called a packet identifier
5971	// (PID). Each transport stream program corresponds to one MediaConvert output.
5972	// The PMT lists the types of data in a program along with their PID. Downstream
5973	// systems and players use the program map table to look up the PID for each
5974	// type of data they access and then use the PIDs to locate specific data within
5975	// the asset.
5976	M2tsSettings *M2tsSettings `locationName:"m2tsSettings" type:"structure"`
5977
5978	// Settings for TS segments in HLS
5979	M3u8Settings *M3u8Settings `locationName:"m3u8Settings" type:"structure"`
5980
5981	// Settings for MOV Container.
5982	MovSettings *MovSettings `locationName:"movSettings" type:"structure"`
5983
5984	// Settings for MP4 container. You can create audio-only AAC outputs with this
5985	// container.
5986	Mp4Settings *Mp4Settings `locationName:"mp4Settings" type:"structure"`
5987
5988	// Settings for MP4 segments in DASH
5989	MpdSettings *MpdSettings `locationName:"mpdSettings" type:"structure"`
5990
5991	// MXF settings
5992	MxfSettings *MxfSettings `locationName:"mxfSettings" type:"structure"`
5993}
5994
5995// String returns the string representation
5996func (s ContainerSettings) String() string {
5997	return awsutil.Prettify(s)
5998}
5999
6000// GoString returns the string representation
6001func (s ContainerSettings) GoString() string {
6002	return s.String()
6003}
6004
6005// Validate inspects the fields of the type to determine if they are valid.
6006func (s *ContainerSettings) Validate() error {
6007	invalidParams := request.ErrInvalidParams{Context: "ContainerSettings"}
6008	if s.M2tsSettings != nil {
6009		if err := s.M2tsSettings.Validate(); err != nil {
6010			invalidParams.AddNested("M2tsSettings", err.(request.ErrInvalidParams))
6011		}
6012	}
6013	if s.M3u8Settings != nil {
6014		if err := s.M3u8Settings.Validate(); err != nil {
6015			invalidParams.AddNested("M3u8Settings", err.(request.ErrInvalidParams))
6016		}
6017	}
6018
6019	if invalidParams.Len() > 0 {
6020		return invalidParams
6021	}
6022	return nil
6023}
6024
6025// SetCmfcSettings sets the CmfcSettings field's value.
6026func (s *ContainerSettings) SetCmfcSettings(v *CmfcSettings) *ContainerSettings {
6027	s.CmfcSettings = v
6028	return s
6029}
6030
6031// SetContainer sets the Container field's value.
6032func (s *ContainerSettings) SetContainer(v string) *ContainerSettings {
6033	s.Container = &v
6034	return s
6035}
6036
6037// SetF4vSettings sets the F4vSettings field's value.
6038func (s *ContainerSettings) SetF4vSettings(v *F4vSettings) *ContainerSettings {
6039	s.F4vSettings = v
6040	return s
6041}
6042
6043// SetM2tsSettings sets the M2tsSettings field's value.
6044func (s *ContainerSettings) SetM2tsSettings(v *M2tsSettings) *ContainerSettings {
6045	s.M2tsSettings = v
6046	return s
6047}
6048
6049// SetM3u8Settings sets the M3u8Settings field's value.
6050func (s *ContainerSettings) SetM3u8Settings(v *M3u8Settings) *ContainerSettings {
6051	s.M3u8Settings = v
6052	return s
6053}
6054
6055// SetMovSettings sets the MovSettings field's value.
6056func (s *ContainerSettings) SetMovSettings(v *MovSettings) *ContainerSettings {
6057	s.MovSettings = v
6058	return s
6059}
6060
6061// SetMp4Settings sets the Mp4Settings field's value.
6062func (s *ContainerSettings) SetMp4Settings(v *Mp4Settings) *ContainerSettings {
6063	s.Mp4Settings = v
6064	return s
6065}
6066
6067// SetMpdSettings sets the MpdSettings field's value.
6068func (s *ContainerSettings) SetMpdSettings(v *MpdSettings) *ContainerSettings {
6069	s.MpdSettings = v
6070	return s
6071}
6072
6073// SetMxfSettings sets the MxfSettings field's value.
6074func (s *ContainerSettings) SetMxfSettings(v *MxfSettings) *ContainerSettings {
6075	s.MxfSettings = v
6076	return s
6077}
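
// Example (illustrative sketch): container settings for an MP4 output. The
// container string is assumed to be one of the ContainerType values; the
// empty Mp4Settings value simply accepts the service defaults.
//
//    container := (&ContainerSettings{}).
//        SetContainer("MP4").
//        SetMp4Settings(&Mp4Settings{})
//
//    if err := container.Validate(); err != nil {
//        fmt.Println(err)
//    }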
6078
6079// Send your create job request with your job settings and IAM role. Optionally,
6080// include user metadata and the ARN for the queue.
6081type CreateJobInput struct {
6082	_ struct{} `type:"structure"`
6083
6084	// Optional. Accelerated transcoding can significantly speed up jobs with long,
6085	// visually complex content. Outputs that use this feature incur pro-tier pricing.
6086	// For information about feature limitations, see the AWS Elemental MediaConvert
6087	// User Guide.
6088	AccelerationSettings *AccelerationSettings `locationName:"accelerationSettings" type:"structure"`
6089
6090	// Optional. Choose a tag type that AWS Billing and Cost Management will use
6091	// to sort your AWS Elemental MediaConvert costs on any billing report that
6092	// you set up. Any transcoding outputs that don't have an associated tag will
6093	// appear in your billing report unsorted. If you don't choose a valid value
6094	// for this field, your job outputs will appear on the billing report unsorted.
6095	BillingTagsSource *string `locationName:"billingTagsSource" type:"string" enum:"BillingTagsSource"`
6096
6097	// Optional. Idempotency token for CreateJob operation.
6098	ClientRequestToken *string `locationName:"clientRequestToken" type:"string" idempotencyToken:"true"`
6099
6100	// Optional. Use queue hopping to avoid overly long waits in the backlog of
6101	// the queue that you submit your job to. Specify an alternate queue and the
6102	// maximum time that your job will wait in the initial queue before hopping.
6103	// For more information about this feature, see the AWS Elemental MediaConvert
6104	// User Guide.
6105	HopDestinations []*HopDestination `locationName:"hopDestinations" type:"list"`
6106
6107	// Optional. When you create a job, you can either specify a job template or
6108	// specify the transcoding settings individually.
6109	JobTemplate *string `locationName:"jobTemplate" type:"string"`
6110
6111	// Optional. Specify the relative priority for this job. In any given queue,
6112	// the service begins processing the job with the highest value first. When
6113	// more than one job has the same priority, the service begins processing the
6114	// job that you submitted first. If you don't specify a priority, the service
6115	// uses the default value 0.
6116	Priority *int64 `locationName:"priority" type:"integer"`
6117
6118	// Optional. When you create a job, you can specify a queue to send it to. If
6119	// you don't specify, the job will go to the default queue. For more about queues,
6120	// see the User Guide topic at https://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html.
6121	Queue *string `locationName:"queue" type:"string"`
6122
6123	// Required. The IAM role you use for creating this job. For details about permissions,
6124	// see the User Guide topic at https://docs.aws.amazon.com/mediaconvert/latest/ug/iam-role.html.
6125	//
6126	// Role is a required field
6127	Role *string `locationName:"role" type:"string" required:"true"`
6128
6129	// JobSettings contains all the transcode settings for a job.
6130	//
6131	// Settings is a required field
6132	Settings *JobSettings `locationName:"settings" type:"structure" required:"true"`
6133
6134	// Optional. Enable this setting when you run a test job to estimate how many
6135	// reserved transcoding slots (RTS) you need. When this is enabled, MediaConvert
6136	// runs your job from an on-demand queue with similar performance to what you
6137	// will see with one RTS in a reserved queue. This setting is disabled by default.
6138	SimulateReservedQueue *string `locationName:"simulateReservedQueue" type:"string" enum:"SimulateReservedQueue"`
6139
6140	// Optional. Specify how often MediaConvert sends STATUS_UPDATE events to Amazon
6141	// CloudWatch Events. Set the interval, in seconds, between status updates.
6142	// MediaConvert sends an update at this interval from the time the service begins
6143	// processing your job to the time it completes the transcode or encounters
6144	// an error.
6145	StatusUpdateInterval *string `locationName:"statusUpdateInterval" type:"string" enum:"StatusUpdateInterval"`
6146
6147	// Optional. The tags that you want to add to the resource. You can tag resources
6148	// with a key-value pair or with only a key. Use standard AWS tags on your job
6149	// for automatic integration with AWS services and for custom integrations and
6150	// workflows.
6151	Tags map[string]*string `locationName:"tags" type:"map"`
6152
6153	// Optional. User-defined metadata that you want to associate with a MediaConvert
6154	// job. You specify metadata in key/value pairs. Use only for existing integrations
6155	// or workflows that rely on job metadata tags. Otherwise, we recommend that
6156	// you use standard AWS tags.
6157	UserMetadata map[string]*string `locationName:"userMetadata" type:"map"`
6158}
6159
6160// String returns the string representation
6161func (s CreateJobInput) String() string {
6162	return awsutil.Prettify(s)
6163}
6164
6165// GoString returns the string representation
6166func (s CreateJobInput) GoString() string {
6167	return s.String()
6168}
6169
6170// Validate inspects the fields of the type to determine if they are valid.
6171func (s *CreateJobInput) Validate() error {
6172	invalidParams := request.ErrInvalidParams{Context: "CreateJobInput"}
6173	if s.Priority != nil && *s.Priority < -50 {
6174		invalidParams.Add(request.NewErrParamMinValue("Priority", -50))
6175	}
6176	if s.Role == nil {
6177		invalidParams.Add(request.NewErrParamRequired("Role"))
6178	}
6179	if s.Settings == nil {
6180		invalidParams.Add(request.NewErrParamRequired("Settings"))
6181	}
6182	if s.AccelerationSettings != nil {
6183		if err := s.AccelerationSettings.Validate(); err != nil {
6184			invalidParams.AddNested("AccelerationSettings", err.(request.ErrInvalidParams))
6185		}
6186	}
6187	if s.HopDestinations != nil {
6188		for i, v := range s.HopDestinations {
6189			if v == nil {
6190				continue
6191			}
6192			if err := v.Validate(); err != nil {
6193				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "HopDestinations", i), err.(request.ErrInvalidParams))
6194			}
6195		}
6196	}
6197	if s.Settings != nil {
6198		if err := s.Settings.Validate(); err != nil {
6199			invalidParams.AddNested("Settings", err.(request.ErrInvalidParams))
6200		}
6201	}
6202
6203	if invalidParams.Len() > 0 {
6204		return invalidParams
6205	}
6206	return nil
6207}
6208
6209// SetAccelerationSettings sets the AccelerationSettings field's value.
6210func (s *CreateJobInput) SetAccelerationSettings(v *AccelerationSettings) *CreateJobInput {
6211	s.AccelerationSettings = v
6212	return s
6213}
6214
6215// SetBillingTagsSource sets the BillingTagsSource field's value.
6216func (s *CreateJobInput) SetBillingTagsSource(v string) *CreateJobInput {
6217	s.BillingTagsSource = &v
6218	return s
6219}
6220
6221// SetClientRequestToken sets the ClientRequestToken field's value.
6222func (s *CreateJobInput) SetClientRequestToken(v string) *CreateJobInput {
6223	s.ClientRequestToken = &v
6224	return s
6225}
6226
6227// SetHopDestinations sets the HopDestinations field's value.
6228func (s *CreateJobInput) SetHopDestinations(v []*HopDestination) *CreateJobInput {
6229	s.HopDestinations = v
6230	return s
6231}
6232
6233// SetJobTemplate sets the JobTemplate field's value.
6234func (s *CreateJobInput) SetJobTemplate(v string) *CreateJobInput {
6235	s.JobTemplate = &v
6236	return s
6237}
6238
6239// SetPriority sets the Priority field's value.
6240func (s *CreateJobInput) SetPriority(v int64) *CreateJobInput {
6241	s.Priority = &v
6242	return s
6243}
6244
6245// SetQueue sets the Queue field's value.
6246func (s *CreateJobInput) SetQueue(v string) *CreateJobInput {
6247	s.Queue = &v
6248	return s
6249}
6250
6251// SetRole sets the Role field's value.
6252func (s *CreateJobInput) SetRole(v string) *CreateJobInput {
6253	s.Role = &v
6254	return s
6255}
6256
6257// SetSettings sets the Settings field's value.
6258func (s *CreateJobInput) SetSettings(v *JobSettings) *CreateJobInput {
6259	s.Settings = v
6260	return s
6261}
6262
6263// SetSimulateReservedQueue sets the SimulateReservedQueue field's value.
6264func (s *CreateJobInput) SetSimulateReservedQueue(v string) *CreateJobInput {
6265	s.SimulateReservedQueue = &v
6266	return s
6267}
6268
6269// SetStatusUpdateInterval sets the StatusUpdateInterval field's value.
6270func (s *CreateJobInput) SetStatusUpdateInterval(v string) *CreateJobInput {
6271	s.StatusUpdateInterval = &v
6272	return s
6273}
6274
6275// SetTags sets the Tags field's value.
6276func (s *CreateJobInput) SetTags(v map[string]*string) *CreateJobInput {
6277	s.Tags = v
6278	return s
6279}
6280
6281// SetUserMetadata sets the UserMetadata field's value.
6282func (s *CreateJobInput) SetUserMetadata(v map[string]*string) *CreateJobInput {
6283	s.UserMetadata = v
6284	return s
6285}
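
// Example (illustrative sketch): building a create job request and sending it
// with the CreateJob API operation. The role ARN, queue ARN, and client value
// "svc" are placeholders, and the JobSettings value is left empty here; a real
// job needs inputs and output groups in its settings.
//
//    input := (&CreateJobInput{}).
//        SetRole("arn:aws:iam::111122223333:role/MediaConvertRole").
//        SetQueue("arn:aws:mediaconvert:us-west-2:111122223333:queues/Default").
//        SetPriority(0).
//        SetSettings(&JobSettings{})
//
//    resp, err := svc.CreateJob(input)
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }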
6286
6287// Successful create job requests will return the job JSON.
6288type CreateJobOutput struct {
6289	_ struct{} `type:"structure"`
6290
6291	// Each job converts an input file into an output file or files. For more information,
6292	// see the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
6293	Job *Job `locationName:"job" type:"structure"`
6294}
6295
6296// String returns the string representation
6297func (s CreateJobOutput) String() string {
6298	return awsutil.Prettify(s)
6299}
6300
6301// GoString returns the string representation
6302func (s CreateJobOutput) GoString() string {
6303	return s.String()
6304}
6305
6306// SetJob sets the Job field's value.
6307func (s *CreateJobOutput) SetJob(v *Job) *CreateJobOutput {
6308	s.Job = v
6309	return s
6310}
6311
6312// Send your create job template request with the name of the template and the
6313// JSON for the template. The template JSON should include everything in a valid
6314// job, except for input location and filename, IAM role, and user metadata.
6315type CreateJobTemplateInput struct {
6316	_ struct{} `type:"structure"`
6317
6318	// Accelerated transcoding can significantly speed up jobs with long, visually
6319	// complex content. Outputs that use this feature incur pro-tier pricing. For
6320	// information about feature limitations, see the AWS Elemental MediaConvert
6321	// User Guide.
6322	AccelerationSettings *AccelerationSettings `locationName:"accelerationSettings" type:"structure"`
6323
6324	// Optional. A category for the job template you are creating
6325	Category *string `locationName:"category" type:"string"`
6326
6327	// Optional. A description of the job template you are creating.
6328	Description *string `locationName:"description" type:"string"`
6329
6330	// Optional. Use queue hopping to avoid overly long waits in the backlog of
6331	// the queue that you submit your job to. Specify an alternate queue and the
6332	// maximum time that your job will wait in the initial queue before hopping.
6333	// For more information about this feature, see the AWS Elemental MediaConvert
6334	// User Guide.
6335	HopDestinations []*HopDestination `locationName:"hopDestinations" type:"list"`
6336
6337	// The name of the job template you are creating.
6338	//
6339	// Name is a required field
6340	Name *string `locationName:"name" type:"string" required:"true"`
6341
6342	// Specify the relative priority for this job. In any given queue, the service
6343	// begins processing the job with the highest value first. When more than one
6344	// job has the same priority, the service begins processing the job that you
6345	// submitted first. If you don't specify a priority, the service uses the default
6346	// value 0.
6347	Priority *int64 `locationName:"priority" type:"integer"`
6348
6349	// Optional. The queue that jobs created from this template are assigned to.
6350	// If you don't specify this, jobs will go to the default queue.
6351	Queue *string `locationName:"queue" type:"string"`
6352
6353	// JobTemplateSettings contains all the transcode settings saved in the template
6354	// that will be applied to jobs created from it.
6355	//
6356	// Settings is a required field
6357	Settings *JobTemplateSettings `locationName:"settings" type:"structure" required:"true"`
6358
6359	// Specify how often MediaConvert sends STATUS_UPDATE events to Amazon CloudWatch
6360	// Events. Set the interval, in seconds, between status updates. MediaConvert
6361	// sends an update at this interval from the time the service begins processing
6362	// your job to the time it completes the transcode or encounters an error.
6363	StatusUpdateInterval *string `locationName:"statusUpdateInterval" type:"string" enum:"StatusUpdateInterval"`
6364
6365	// The tags that you want to add to the resource. You can tag resources with
6366	// a key-value pair or with only a key.
6367	Tags map[string]*string `locationName:"tags" type:"map"`
6368}
6369
6370// String returns the string representation
6371func (s CreateJobTemplateInput) String() string {
6372	return awsutil.Prettify(s)
6373}
6374
6375// GoString returns the string representation
6376func (s CreateJobTemplateInput) GoString() string {
6377	return s.String()
6378}
6379
6380// Validate inspects the fields of the type to determine if they are valid.
6381func (s *CreateJobTemplateInput) Validate() error {
6382	invalidParams := request.ErrInvalidParams{Context: "CreateJobTemplateInput"}
6383	if s.Name == nil {
6384		invalidParams.Add(request.NewErrParamRequired("Name"))
6385	}
6386	if s.Priority != nil && *s.Priority < -50 {
6387		invalidParams.Add(request.NewErrParamMinValue("Priority", -50))
6388	}
6389	if s.Settings == nil {
6390		invalidParams.Add(request.NewErrParamRequired("Settings"))
6391	}
6392	if s.AccelerationSettings != nil {
6393		if err := s.AccelerationSettings.Validate(); err != nil {
6394			invalidParams.AddNested("AccelerationSettings", err.(request.ErrInvalidParams))
6395		}
6396	}
6397	if s.HopDestinations != nil {
6398		for i, v := range s.HopDestinations {
6399			if v == nil {
6400				continue
6401			}
6402			if err := v.Validate(); err != nil {
6403				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "HopDestinations", i), err.(request.ErrInvalidParams))
6404			}
6405		}
6406	}
6407	if s.Settings != nil {
6408		if err := s.Settings.Validate(); err != nil {
6409			invalidParams.AddNested("Settings", err.(request.ErrInvalidParams))
6410		}
6411	}
6412
6413	if invalidParams.Len() > 0 {
6414		return invalidParams
6415	}
6416	return nil
6417}
6418
6419// SetAccelerationSettings sets the AccelerationSettings field's value.
6420func (s *CreateJobTemplateInput) SetAccelerationSettings(v *AccelerationSettings) *CreateJobTemplateInput {
6421	s.AccelerationSettings = v
6422	return s
6423}
6424
6425// SetCategory sets the Category field's value.
6426func (s *CreateJobTemplateInput) SetCategory(v string) *CreateJobTemplateInput {
6427	s.Category = &v
6428	return s
6429}
6430
6431// SetDescription sets the Description field's value.
6432func (s *CreateJobTemplateInput) SetDescription(v string) *CreateJobTemplateInput {
6433	s.Description = &v
6434	return s
6435}
6436
6437// SetHopDestinations sets the HopDestinations field's value.
6438func (s *CreateJobTemplateInput) SetHopDestinations(v []*HopDestination) *CreateJobTemplateInput {
6439	s.HopDestinations = v
6440	return s
6441}
6442
6443// SetName sets the Name field's value.
6444func (s *CreateJobTemplateInput) SetName(v string) *CreateJobTemplateInput {
6445	s.Name = &v
6446	return s
6447}
6448
6449// SetPriority sets the Priority field's value.
6450func (s *CreateJobTemplateInput) SetPriority(v int64) *CreateJobTemplateInput {
6451	s.Priority = &v
6452	return s
6453}
6454
6455// SetQueue sets the Queue field's value.
6456func (s *CreateJobTemplateInput) SetQueue(v string) *CreateJobTemplateInput {
6457	s.Queue = &v
6458	return s
6459}
6460
6461// SetSettings sets the Settings field's value.
6462func (s *CreateJobTemplateInput) SetSettings(v *JobTemplateSettings) *CreateJobTemplateInput {
6463	s.Settings = v
6464	return s
6465}
6466
6467// SetStatusUpdateInterval sets the StatusUpdateInterval field's value.
6468func (s *CreateJobTemplateInput) SetStatusUpdateInterval(v string) *CreateJobTemplateInput {
6469	s.StatusUpdateInterval = &v
6470	return s
6471}
6472
6473// SetTags sets the Tags field's value.
6474func (s *CreateJobTemplateInput) SetTags(v map[string]*string) *CreateJobTemplateInput {
6475	s.Tags = v
6476	return s
6477}
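
// Example (illustrative sketch): building a create job template request. The
// name and category are placeholders, and the JobTemplateSettings value is
// left empty here; a real template carries the transcode settings you want
// jobs to inherit.
//
//    tmplInput := (&CreateJobTemplateInput{}).
//        SetName("example-template").
//        SetCategory("Broadcast").
//        SetSettings(&JobTemplateSettings{})
//
//    if err := tmplInput.Validate(); err != nil {
//        fmt.Println(err) // reports missing required fields such as Name or Settings
//    }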
6478
6479// Successful create job template requests will return the template JSON.
6480type CreateJobTemplateOutput struct {
6481	_ struct{} `type:"structure"`
6482
6483	// A job template is a pre-made set of encoding instructions that you can use
6484	// to quickly create a job.
6485	JobTemplate *JobTemplate `locationName:"jobTemplate" type:"structure"`
6486}
6487
6488// String returns the string representation
6489func (s CreateJobTemplateOutput) String() string {
6490	return awsutil.Prettify(s)
6491}
6492
6493// GoString returns the string representation
6494func (s CreateJobTemplateOutput) GoString() string {
6495	return s.String()
6496}
6497
6498// SetJobTemplate sets the JobTemplate field's value.
6499func (s *CreateJobTemplateOutput) SetJobTemplate(v *JobTemplate) *CreateJobTemplateOutput {
6500	s.JobTemplate = v
6501	return s
6502}
6503
6504// Send your create preset request with the name of the preset and the JSON
6505// for the output settings specified by the preset.
6506type CreatePresetInput struct {
6507	_ struct{} `type:"structure"`
6508
6509	// Optional. A category for the preset you are creating.
6510	Category *string `locationName:"category" type:"string"`
6511
6512	// Optional. A description of the preset you are creating.
6513	Description *string `locationName:"description" type:"string"`
6514
6515	// The name of the preset you are creating.
6516	//
6517	// Name is a required field
6518	Name *string `locationName:"name" type:"string" required:"true"`
6519
6520	// Settings for preset
6521	//
6522	// Settings is a required field
6523	Settings *PresetSettings `locationName:"settings" type:"structure" required:"true"`
6524
6525	// The tags that you want to add to the resource. You can tag resources with
6526	// a key-value pair or with only a key.
6527	Tags map[string]*string `locationName:"tags" type:"map"`
6528}
6529
6530// String returns the string representation
6531func (s CreatePresetInput) String() string {
6532	return awsutil.Prettify(s)
6533}
6534
6535// GoString returns the string representation
6536func (s CreatePresetInput) GoString() string {
6537	return s.String()
6538}
6539
6540// Validate inspects the fields of the type to determine if they are valid.
6541func (s *CreatePresetInput) Validate() error {
6542	invalidParams := request.ErrInvalidParams{Context: "CreatePresetInput"}
6543	if s.Name == nil {
6544		invalidParams.Add(request.NewErrParamRequired("Name"))
6545	}
6546	if s.Settings == nil {
6547		invalidParams.Add(request.NewErrParamRequired("Settings"))
6548	}
6549	if s.Settings != nil {
6550		if err := s.Settings.Validate(); err != nil {
6551			invalidParams.AddNested("Settings", err.(request.ErrInvalidParams))
6552		}
6553	}
6554
6555	if invalidParams.Len() > 0 {
6556		return invalidParams
6557	}
6558	return nil
6559}
6560
6561// SetCategory sets the Category field's value.
6562func (s *CreatePresetInput) SetCategory(v string) *CreatePresetInput {
6563	s.Category = &v
6564	return s
6565}
6566
6567// SetDescription sets the Description field's value.
6568func (s *CreatePresetInput) SetDescription(v string) *CreatePresetInput {
6569	s.Description = &v
6570	return s
6571}
6572
6573// SetName sets the Name field's value.
6574func (s *CreatePresetInput) SetName(v string) *CreatePresetInput {
6575	s.Name = &v
6576	return s
6577}
6578
6579// SetSettings sets the Settings field's value.
6580func (s *CreatePresetInput) SetSettings(v *PresetSettings) *CreatePresetInput {
6581	s.Settings = v
6582	return s
6583}
6584
6585// SetTags sets the Tags field's value.
6586func (s *CreatePresetInput) SetTags(v map[string]*string) *CreatePresetInput {
6587	s.Tags = v
6588	return s
6589}
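
// Example (illustrative sketch): building a create preset request. The name
// and description are placeholders, and the PresetSettings value is left
// empty here; a real preset carries the output settings you want to reuse.
//
//    presetInput := (&CreatePresetInput{}).
//        SetName("example-preset").
//        SetDescription("Example output settings").
//        SetSettings(&PresetSettings{})
//
//    if err := presetInput.Validate(); err != nil {
//        fmt.Println(err)
//    }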
6590
6591// Successful create preset requests will return the preset JSON.
6592type CreatePresetOutput struct {
6593	_ struct{} `type:"structure"`
6594
6595	// A preset is a collection of preconfigured media conversion settings that
6596	// you want MediaConvert to apply to the output during the conversion process.
6597	Preset *Preset `locationName:"preset" type:"structure"`
6598}
6599
6600// String returns the string representation
6601func (s CreatePresetOutput) String() string {
6602	return awsutil.Prettify(s)
6603}
6604
6605// GoString returns the string representation
6606func (s CreatePresetOutput) GoString() string {
6607	return s.String()
6608}
6609
6610// SetPreset sets the Preset field's value.
6611func (s *CreatePresetOutput) SetPreset(v *Preset) *CreatePresetOutput {
6612	s.Preset = v
6613	return s
6614}
6615
6616// Create an on-demand queue by sending a CreateQueue request with the name
6617// of the queue. Create a reserved queue by sending a CreateQueue request with
6618// the pricing plan set to RESERVED and with values specified for the settings
6619// under reservationPlanSettings. When you create a reserved queue, you enter
6620// into a 12-month commitment to purchase the RTS that you specify. You can't
6621// cancel this commitment.
6622type CreateQueueInput struct {
6623	_ struct{} `type:"structure"`
6624
6625	// Optional. A description of the queue that you are creating.
6626	Description *string `locationName:"description" type:"string"`
6627
6628	// The name of the queue that you are creating.
6629	//
6630	// Name is a required field
6631	Name *string `locationName:"name" type:"string" required:"true"`
6632
6633	// Specifies whether the pricing plan for the queue is on-demand or reserved.
6634	// For on-demand, you pay per minute, billed in increments of .01 minute. For
6635	// reserved, you pay for the transcoding capacity of the entire queue, regardless
6636	// of how much or how little you use it. Reserved pricing requires a 12-month
6637	// commitment. When you use the API to create a queue, the default is on-demand.
6638	PricingPlan *string `locationName:"pricingPlan" type:"string" enum:"PricingPlan"`
6639
6640	// Details about the pricing plan for your reserved queue. Required for reserved
6641	// queues and not applicable to on-demand queues.
6642	ReservationPlanSettings *ReservationPlanSettings `locationName:"reservationPlanSettings" type:"structure"`
6643
6644	// Initial state of the queue. If you create a paused queue, then jobs in that
6645	// queue won't begin.
6646	Status *string `locationName:"status" type:"string" enum:"QueueStatus"`
6647
6648	// The tags that you want to add to the resource. You can tag resources with
6649	// a key-value pair or with only a key.
6650	Tags map[string]*string `locationName:"tags" type:"map"`
6651}
6652
6653// String returns the string representation
6654func (s CreateQueueInput) String() string {
6655	return awsutil.Prettify(s)
6656}
6657
6658// GoString returns the string representation
6659func (s CreateQueueInput) GoString() string {
6660	return s.String()
6661}
6662
6663// Validate inspects the fields of the type to determine if they are valid.
6664func (s *CreateQueueInput) Validate() error {
6665	invalidParams := request.ErrInvalidParams{Context: "CreateQueueInput"}
6666	if s.Name == nil {
6667		invalidParams.Add(request.NewErrParamRequired("Name"))
6668	}
6669	if s.ReservationPlanSettings != nil {
6670		if err := s.ReservationPlanSettings.Validate(); err != nil {
6671			invalidParams.AddNested("ReservationPlanSettings", err.(request.ErrInvalidParams))
6672		}
6673	}
6674
6675	if invalidParams.Len() > 0 {
6676		return invalidParams
6677	}
6678	return nil
6679}
6680
6681// SetDescription sets the Description field's value.
6682func (s *CreateQueueInput) SetDescription(v string) *CreateQueueInput {
6683	s.Description = &v
6684	return s
6685}
6686
6687// SetName sets the Name field's value.
6688func (s *CreateQueueInput) SetName(v string) *CreateQueueInput {
6689	s.Name = &v
6690	return s
6691}
6692
6693// SetPricingPlan sets the PricingPlan field's value.
6694func (s *CreateQueueInput) SetPricingPlan(v string) *CreateQueueInput {
6695	s.PricingPlan = &v
6696	return s
6697}
6698
6699// SetReservationPlanSettings sets the ReservationPlanSettings field's value.
6700func (s *CreateQueueInput) SetReservationPlanSettings(v *ReservationPlanSettings) *CreateQueueInput {
6701	s.ReservationPlanSettings = v
6702	return s
6703}
6704
6705// SetStatus sets the Status field's value.
6706func (s *CreateQueueInput) SetStatus(v string) *CreateQueueInput {
6707	s.Status = &v
6708	return s
6709}
6710
6711// SetTags sets the Tags field's value.
6712func (s *CreateQueueInput) SetTags(v map[string]*string) *CreateQueueInput {
6713	s.Tags = v
6714	return s
6715}
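
// Example (illustrative sketch): building a create queue request for an
// on-demand queue and sending it with the CreateQueue API operation. The queue
// name and client value "svc" are placeholders; ON_DEMAND is assumed to be one
// of the PricingPlan values. Reserved queues additionally require
// ReservationPlanSettings.
//
//    queueInput := (&CreateQueueInput{}).
//        SetName("example-queue").
//        SetDescription("Example on-demand queue").
//        SetPricingPlan("ON_DEMAND")
//
//    resp, err := svc.CreateQueue(queueInput)
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }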
6716
6717// Successful create queue requests return the name of the queue that you just
6718// created and information about it.
6719type CreateQueueOutput struct {
6720	_ struct{} `type:"structure"`
6721
6722	// You can use queues to manage the resources that are available to your AWS
6723	// account for running multiple transcoding jobs at the same time. If you don't
6724	// specify a queue, the service sends all jobs through the default queue. For
6725	// more information, see https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html.
6726	Queue *Queue `locationName:"queue" type:"structure"`
6727}
6728
6729// String returns the string representation
6730func (s CreateQueueOutput) String() string {
6731	return awsutil.Prettify(s)
6732}
6733
6734// GoString returns the string representation
6735func (s CreateQueueOutput) GoString() string {
6736	return s.String()
6737}
6738
6739// SetQueue sets the Queue field's value.
6740func (s *CreateQueueOutput) SetQueue(v *Queue) *CreateQueueOutput {
6741	s.Queue = v
6742	return s
6743}
6744
6745// Specify the details for each additional DASH manifest that you want the service
6746// to generate for this output group. Each manifest can reference a different
6747// subset of outputs in the group.
6748type DashAdditionalManifest struct {
6749	_ struct{} `type:"structure"`
6750
6751	// Specify a name modifier that the service adds to the name of this manifest
6752	// to make it different from the file names of the other main manifests in the
6753	// output group. For example, say that the default main manifest for your DASH
6754	// group is film-name.mpd. If you enter "-no-premium" for this setting, then
6755	// the file name the service generates for this top-level manifest is film-name-no-premium.mpd.
6756	ManifestNameModifier *string `locationName:"manifestNameModifier" min:"1" type:"string"`
6757
6758	// Specify the outputs that you want this additional top-level manifest to reference.
6759	SelectedOutputs []*string `locationName:"selectedOutputs" type:"list"`
6760}
6761
6762// String returns the string representation
6763func (s DashAdditionalManifest) String() string {
6764	return awsutil.Prettify(s)
6765}
6766
6767// GoString returns the string representation
6768func (s DashAdditionalManifest) GoString() string {
6769	return s.String()
6770}
6771
6772// Validate inspects the fields of the type to determine if they are valid.
6773func (s *DashAdditionalManifest) Validate() error {
6774	invalidParams := request.ErrInvalidParams{Context: "DashAdditionalManifest"}
6775	if s.ManifestNameModifier != nil && len(*s.ManifestNameModifier) < 1 {
6776		invalidParams.Add(request.NewErrParamMinLen("ManifestNameModifier", 1))
6777	}
6778
6779	if invalidParams.Len() > 0 {
6780		return invalidParams
6781	}
6782	return nil
6783}
6784
6785// SetManifestNameModifier sets the ManifestNameModifier field's value.
6786func (s *DashAdditionalManifest) SetManifestNameModifier(v string) *DashAdditionalManifest {
6787	s.ManifestNameModifier = &v
6788	return s
6789}
6790
6791// SetSelectedOutputs sets the SelectedOutputs field's value.
6792func (s *DashAdditionalManifest) SetSelectedOutputs(v []*string) *DashAdditionalManifest {
6793	s.SelectedOutputs = v
6794	return s
6795}
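
// Example (illustrative sketch): an additional DASH manifest that references a
// subset of outputs. The modifier string mirrors the example in the field
// documentation above; the selected output name is a placeholder.
//
//    extraManifest := (&DashAdditionalManifest{}).
//        SetManifestNameModifier("-no-premium").
//        SetSelectedOutputs([]*string{aws.String("premium-free-output")})
//
//    if err := extraManifest.Validate(); err != nil {
//        fmt.Println(err) // reports a name modifier shorter than 1 character
//    }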
6796
6797// Specifies DRM settings for DASH outputs.
6798type DashIsoEncryptionSettings struct {
6799	_ struct{} `type:"structure"`
6800
6801	// This setting can improve the compatibility of your output with video players
6802	// on obsolete devices. It applies only to DASH H.264 outputs with DRM encryption.
6803	// Choose Unencrypted SEI (UNENCRYPTED_SEI) only to correct problems with playback
6804	// on older devices. Otherwise, keep the default setting CENC v1 (CENC_V1).
6805	// If you choose Unencrypted SEI, for that output, the service will exclude
6806	// the access unit delimiter and will leave the SEI NAL units unencrypted.
6807	PlaybackDeviceCompatibility *string `locationName:"playbackDeviceCompatibility" type:"string" enum:"DashIsoPlaybackDeviceCompatibility"`
6808
6809	// If your output group type is HLS, DASH, or Microsoft Smooth, use these settings
6810	// when doing DRM encryption with a SPEKE-compliant key provider. If your output
6811	// group type is CMAF, use the SpekeKeyProviderCmaf settings instead.
6812	SpekeKeyProvider *SpekeKeyProvider `locationName:"spekeKeyProvider" type:"structure"`
6813}
6814
6815// String returns the string representation
6816func (s DashIsoEncryptionSettings) String() string {
6817	return awsutil.Prettify(s)
6818}
6819
6820// GoString returns the string representation
6821func (s DashIsoEncryptionSettings) GoString() string {
6822	return s.String()
6823}
6824
6825// SetPlaybackDeviceCompatibility sets the PlaybackDeviceCompatibility field's value.
6826func (s *DashIsoEncryptionSettings) SetPlaybackDeviceCompatibility(v string) *DashIsoEncryptionSettings {
6827	s.PlaybackDeviceCompatibility = &v
6828	return s
6829}
6830
6831// SetSpekeKeyProvider sets the SpekeKeyProvider field's value.
6832func (s *DashIsoEncryptionSettings) SetSpekeKeyProvider(v *SpekeKeyProvider) *DashIsoEncryptionSettings {
6833	s.SpekeKeyProvider = v
6834	return s
6835}
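
// Example (illustrative, not generated): encryption settings that keep the
// default CENC v1 compatibility mode. A SPEKE key provider, configured via the
// SpekeKeyProvider type defined elsewhere in this package, is still required
// for actual DRM.
//
//    drm := (&DashIsoEncryptionSettings{}).
//        SetPlaybackDeviceCompatibility("CENC_V1")
//    // drm.SetSpekeKeyProvider(mySpekeProvider) // mySpekeProvider: a *SpekeKeyProvider you build separately
//    _ = drm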
6836
6837// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
6838// DASH_ISO_GROUP_SETTINGS.
6839type DashIsoGroupSettings struct {
6840	_ struct{} `type:"structure"`
6841
6842	// By default, the service creates one .mpd DASH manifest for each DASH ISO
6843	// output group in your job. This default manifest references every output in
6844	// the output group. To create additional DASH manifests that reference a subset
6845	// of the outputs in the output group, specify a list of them here.
6846	AdditionalManifests []*DashAdditionalManifest `locationName:"additionalManifests" type:"list"`
6847
6848	// A partial URI prefix that will be put in the manifest (.mpd) file at the
6849	// top level BaseURL element. Can be used if streams are delivered from a different
6850	// URL than the manifest file.
6851	BaseUrl *string `locationName:"baseUrl" type:"string"`
6852
6853	// Use Destination (Destination) to specify the S3 output location and the output
6854	// filename base. Destination accepts format identifiers. If you do not specify
6855	// the base filename in the URI, the service will use the filename of the input
6856	// file. If your job has multiple inputs, the service uses the filename of the
6857	// first input file.
6858	Destination *string `locationName:"destination" type:"string"`
6859
6860	// Settings associated with the destination. Will vary based on the type of
6861	// destination
6862	DestinationSettings *DestinationSettings `locationName:"destinationSettings" type:"structure"`
6863
6864	// DRM settings.
6865	Encryption *DashIsoEncryptionSettings `locationName:"encryption" type:"structure"`
6866
6867	// Length of fragments to generate (in seconds). Fragment length must be compatible
6868	// with GOP size and Framerate. Note that fragments will end on the next keyframe
6869	// after this number of seconds, so actual fragment length may be longer. When
6870	// Emit Single File is checked, the fragmentation is internal to a single output
6871	// file and it does not cause the creation of many output files as in other
6872	// output types.
6873	FragmentLength *int64 `locationName:"fragmentLength" min:"1" type:"integer"`
6874
6875	// Supports HbbTV specification as indicated
6876	HbbtvCompliance *string `locationName:"hbbtvCompliance" type:"string" enum:"DashIsoHbbtvCompliance"`
6877
6878	// Minimum time of initially buffered media that is needed to ensure smooth
6879	// playout.
6880	MinBufferTime *int64 `locationName:"minBufferTime" type:"integer"`
6881
6882	// Keep this setting at the default value of 0, unless you are troubleshooting
6883	// a problem with how devices play back the end of your video asset. If you
6884	// know that player devices are hanging on the final segment of your video because
6885	// the length of your final segment is too short, use this setting to specify
6886	// a minimum final segment length, in seconds. Choose a value that is greater
6887	// than or equal to 1 and less than your segment length. When you specify a
6888	// value for this setting, the encoder will combine any final segment that is
6889	// shorter than the length that you specify with the previous segment. For example,
6890	// your segment length is 3 seconds and your final segment is .5 seconds without
6891	// a minimum final segment length; when you set the minimum final segment length
6892	// to 1, your final segment is 3.5 seconds.
6893	MinFinalSegmentLength *float64 `locationName:"minFinalSegmentLength" type:"double"`
6894
6895	// Specify whether your DASH profile is on-demand or main. When you choose Main
6896	// profile (MAIN_PROFILE), the service signals urn:mpeg:dash:profile:isoff-main:2011
6897	// in your .mpd DASH manifest. When you choose On-demand (ON_DEMAND_PROFILE),
6898	// the service signals urn:mpeg:dash:profile:isoff-on-demand:2011 in your .mpd.
6899	// When you choose On-demand, you must also set the output group setting Segment
6900	// control (SegmentControl) to Single file (SINGLE_FILE).
6901	MpdProfile *string `locationName:"mpdProfile" type:"string" enum:"DashIsoMpdProfile"`
6902
6903	// When set to SINGLE_FILE, a single output file is generated, which is internally
6904	// segmented using the Fragment Length and Segment Length. When set to SEGMENTED_FILES,
6905	// separate segment files will be created.
6906	SegmentControl *string `locationName:"segmentControl" type:"string" enum:"DashIsoSegmentControl"`
6907
6908	// Length of mpd segments to create (in seconds). Note that segments will end
6909	// on the next keyframe after this number of seconds, so actual segment length
6910	// may be longer. When Emit Single File is checked, the segmentation is internal
6911	// to a single output file and it does not cause the creation of many output
6912	// files as in other output types.
6913	SegmentLength *int64 `locationName:"segmentLength" min:"1" type:"integer"`
6914
6915	// If you get an HTTP error in the 400 range when you play back your DASH output,
6916	// enable this setting and run your transcoding job again. When you enable this
6917	// setting, the service writes precise segment durations in the DASH manifest.
6918	// The segment duration information appears inside the SegmentTimeline element,
6919	// inside SegmentTemplate at the Representation level. When you don't enable
6920	// this setting, the service writes approximate segment durations in your DASH
6921	// manifest.
6922	WriteSegmentTimelineInRepresentation *string `locationName:"writeSegmentTimelineInRepresentation" type:"string" enum:"DashIsoWriteSegmentTimelineInRepresentation"`
6923}
6924
6925// String returns the string representation
6926func (s DashIsoGroupSettings) String() string {
6927	return awsutil.Prettify(s)
6928}
6929
6930// GoString returns the string representation
6931func (s DashIsoGroupSettings) GoString() string {
6932	return s.String()
6933}
6934
6935// Validate inspects the fields of the type to determine if they are valid.
6936func (s *DashIsoGroupSettings) Validate() error {
6937	invalidParams := request.ErrInvalidParams{Context: "DashIsoGroupSettings"}
6938	if s.FragmentLength != nil && *s.FragmentLength < 1 {
6939		invalidParams.Add(request.NewErrParamMinValue("FragmentLength", 1))
6940	}
6941	if s.SegmentLength != nil && *s.SegmentLength < 1 {
6942		invalidParams.Add(request.NewErrParamMinValue("SegmentLength", 1))
6943	}
6944	if s.AdditionalManifests != nil {
6945		for i, v := range s.AdditionalManifests {
6946			if v == nil {
6947				continue
6948			}
6949			if err := v.Validate(); err != nil {
6950				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AdditionalManifests", i), err.(request.ErrInvalidParams))
6951			}
6952		}
6953	}
6954
6955	if invalidParams.Len() > 0 {
6956		return invalidParams
6957	}
6958	return nil
6959}
6960
6961// SetAdditionalManifests sets the AdditionalManifests field's value.
6962func (s *DashIsoGroupSettings) SetAdditionalManifests(v []*DashAdditionalManifest) *DashIsoGroupSettings {
6963	s.AdditionalManifests = v
6964	return s
6965}
6966
6967// SetBaseUrl sets the BaseUrl field's value.
6968func (s *DashIsoGroupSettings) SetBaseUrl(v string) *DashIsoGroupSettings {
6969	s.BaseUrl = &v
6970	return s
6971}
6972
6973// SetDestination sets the Destination field's value.
6974func (s *DashIsoGroupSettings) SetDestination(v string) *DashIsoGroupSettings {
6975	s.Destination = &v
6976	return s
6977}
6978
6979// SetDestinationSettings sets the DestinationSettings field's value.
6980func (s *DashIsoGroupSettings) SetDestinationSettings(v *DestinationSettings) *DashIsoGroupSettings {
6981	s.DestinationSettings = v
6982	return s
6983}
6984
6985// SetEncryption sets the Encryption field's value.
6986func (s *DashIsoGroupSettings) SetEncryption(v *DashIsoEncryptionSettings) *DashIsoGroupSettings {
6987	s.Encryption = v
6988	return s
6989}
6990
6991// SetFragmentLength sets the FragmentLength field's value.
6992func (s *DashIsoGroupSettings) SetFragmentLength(v int64) *DashIsoGroupSettings {
6993	s.FragmentLength = &v
6994	return s
6995}
6996
6997// SetHbbtvCompliance sets the HbbtvCompliance field's value.
6998func (s *DashIsoGroupSettings) SetHbbtvCompliance(v string) *DashIsoGroupSettings {
6999	s.HbbtvCompliance = &v
7000	return s
7001}
7002
7003// SetMinBufferTime sets the MinBufferTime field's value.
7004func (s *DashIsoGroupSettings) SetMinBufferTime(v int64) *DashIsoGroupSettings {
7005	s.MinBufferTime = &v
7006	return s
7007}
7008
7009// SetMinFinalSegmentLength sets the MinFinalSegmentLength field's value.
7010func (s *DashIsoGroupSettings) SetMinFinalSegmentLength(v float64) *DashIsoGroupSettings {
7011	s.MinFinalSegmentLength = &v
7012	return s
7013}
7014
7015// SetMpdProfile sets the MpdProfile field's value.
7016func (s *DashIsoGroupSettings) SetMpdProfile(v string) *DashIsoGroupSettings {
7017	s.MpdProfile = &v
7018	return s
7019}
7020
7021// SetSegmentControl sets the SegmentControl field's value.
7022func (s *DashIsoGroupSettings) SetSegmentControl(v string) *DashIsoGroupSettings {
7023	s.SegmentControl = &v
7024	return s
7025}
7026
7027// SetSegmentLength sets the SegmentLength field's value.
7028func (s *DashIsoGroupSettings) SetSegmentLength(v int64) *DashIsoGroupSettings {
7029	s.SegmentLength = &v
7030	return s
7031}
7032
7033// SetWriteSegmentTimelineInRepresentation sets the WriteSegmentTimelineInRepresentation field's value.
7034func (s *DashIsoGroupSettings) SetWriteSegmentTimelineInRepresentation(v string) *DashIsoGroupSettings {
7035	s.WriteSegmentTimelineInRepresentation = &v
7036	return s
7037}
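
// Example (illustrative, not generated): a minimal DashIsoGroupSettings for an
// on-demand DASH output. As documented above for MpdProfile, choosing
// ON_DEMAND_PROFILE also requires SegmentControl to be SINGLE_FILE. The S3
// destination is a placeholder.
//
//    dashGroup := (&DashIsoGroupSettings{}).
//        SetDestination("s3://my-bucket/dash/film-name").
//        SetMpdProfile("ON_DEMAND_PROFILE").
//        SetSegmentControl("SINGLE_FILE").
//        SetSegmentLength(30).
//        SetFragmentLength(2)
//    if err := dashGroup.Validate(); err != nil {
//        fmt.Println("invalid DASH group settings:", err) // reports values below the documented minimums
//    }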
7038
7039// Settings for deinterlacer
7040type Deinterlacer struct {
7041	_ struct{} `type:"structure"`
7042
7043	// Only applies when you set Deinterlacer (DeinterlaceMode) to Deinterlace (DEINTERLACE)
7044	// or Adaptive (ADAPTIVE). Motion adaptive interpolate (INTERPOLATE) produces
7045	// sharper pictures, while blend (BLEND) produces smoother motion. Use (INTERPOLATE_TICKER)
7046	// or (BLEND_TICKER) if your source file includes a ticker, such as a scrolling
7047	// headline at the bottom of the frame.
7048	Algorithm *string `locationName:"algorithm" type:"string" enum:"DeinterlaceAlgorithm"`
7049
7050	// - When set to NORMAL (default), the deinterlacer does not convert frames
7051	// that are tagged in metadata as progressive. It will only convert those that
7052	// are tagged as some other type. - When set to FORCE_ALL_FRAMES, the deinterlacer
7053	// converts every frame to progressive - even those that are already tagged
7054	// as progressive. Turn Force mode on only if there is a good chance that the
7055	// metadata has tagged frames as progressive when they are not progressive.
7056	// Do not turn it on otherwise; deinterlacing frames that are already progressive
7057	// will probably result in lower-quality video.
7058	Control *string `locationName:"control" type:"string" enum:"DeinterlacerControl"`
7059
7060	// Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing.
7061	// Default is Deinterlace. - Deinterlace converts interlaced to progressive.
7062	// - Inverse telecine converts Hard Telecine 29.97i to progressive 23.976p.
7063	// - Adaptive auto-detects and converts to progressive.
7064	Mode *string `locationName:"mode" type:"string" enum:"DeinterlacerMode"`
7065}
7066
7067// String returns the string representation
7068func (s Deinterlacer) String() string {
7069	return awsutil.Prettify(s)
7070}
7071
7072// GoString returns the string representation
7073func (s Deinterlacer) GoString() string {
7074	return s.String()
7075}
7076
7077// SetAlgorithm sets the Algorithm field's value.
7078func (s *Deinterlacer) SetAlgorithm(v string) *Deinterlacer {
7079	s.Algorithm = &v
7080	return s
7081}
7082
7083// SetControl sets the Control field's value.
7084func (s *Deinterlacer) SetControl(v string) *Deinterlacer {
7085	s.Control = &v
7086	return s
7087}
7088
7089// SetMode sets the Mode field's value.
7090func (s *Deinterlacer) SetMode(v string) *Deinterlacer {
7091	s.Mode = &v
7092	return s
7093}
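
// Example (illustrative, not generated): adaptive deinterlacing with the
// interpolate algorithm. The enum strings are taken from the option names in
// the field documentation above.
//
//    deint := (&Deinterlacer{}).
//        SetMode("ADAPTIVE").
//        SetAlgorithm("INTERPOLATE").
//        SetControl("NORMAL")
//    _ = deint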
7094
7095// Delete a job template by sending a request with the job template name
7096type DeleteJobTemplateInput struct {
7097	_ struct{} `type:"structure"`
7098
7099	// The name of the job template to be deleted.
7100	//
7101	// Name is a required field
7102	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
7103}
7104
7105// String returns the string representation
7106func (s DeleteJobTemplateInput) String() string {
7107	return awsutil.Prettify(s)
7108}
7109
7110// GoString returns the string representation
7111func (s DeleteJobTemplateInput) GoString() string {
7112	return s.String()
7113}
7114
7115// Validate inspects the fields of the type to determine if they are valid.
7116func (s *DeleteJobTemplateInput) Validate() error {
7117	invalidParams := request.ErrInvalidParams{Context: "DeleteJobTemplateInput"}
7118	if s.Name == nil {
7119		invalidParams.Add(request.NewErrParamRequired("Name"))
7120	}
7121	if s.Name != nil && len(*s.Name) < 1 {
7122		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
7123	}
7124
7125	if invalidParams.Len() > 0 {
7126		return invalidParams
7127	}
7128	return nil
7129}
7130
7131// SetName sets the Name field's value.
7132func (s *DeleteJobTemplateInput) SetName(v string) *DeleteJobTemplateInput {
7133	s.Name = &v
7134	return s
7135}
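
// Example (illustrative, not generated): Name is a required field, so Validate
// reports an error while it is unset. The template name is a placeholder.
//
//    in := &DeleteJobTemplateInput{}
//    if err := in.Validate(); err != nil {
//        fmt.Println(err) // reports the missing required Name parameter
//    }
//    in.SetName("my-job-template")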
7136
7137// Delete job template requests will return an OK message or error message with
7138// an empty body.
7139type DeleteJobTemplateOutput struct {
7140	_ struct{} `type:"structure"`
7141}
7142
7143// String returns the string representation
7144func (s DeleteJobTemplateOutput) String() string {
7145	return awsutil.Prettify(s)
7146}
7147
7148// GoString returns the string representation
7149func (s DeleteJobTemplateOutput) GoString() string {
7150	return s.String()
7151}
7152
7153// Delete a preset by sending a request with the preset name
7154type DeletePresetInput struct {
7155	_ struct{} `type:"structure"`
7156
7157	// The name of the preset to be deleted.
7158	//
7159	// Name is a required field
7160	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
7161}
7162
7163// String returns the string representation
7164func (s DeletePresetInput) String() string {
7165	return awsutil.Prettify(s)
7166}
7167
7168// GoString returns the string representation
7169func (s DeletePresetInput) GoString() string {
7170	return s.String()
7171}
7172
7173// Validate inspects the fields of the type to determine if they are valid.
7174func (s *DeletePresetInput) Validate() error {
7175	invalidParams := request.ErrInvalidParams{Context: "DeletePresetInput"}
7176	if s.Name == nil {
7177		invalidParams.Add(request.NewErrParamRequired("Name"))
7178	}
7179	if s.Name != nil && len(*s.Name) < 1 {
7180		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
7181	}
7182
7183	if invalidParams.Len() > 0 {
7184		return invalidParams
7185	}
7186	return nil
7187}
7188
7189// SetName sets the Name field's value.
7190func (s *DeletePresetInput) SetName(v string) *DeletePresetInput {
7191	s.Name = &v
7192	return s
7193}
7194
7195// Delete preset requests will return an OK message or error message with an
7196// empty body.
7197type DeletePresetOutput struct {
7198	_ struct{} `type:"structure"`
7199}
7200
7201// String returns the string representation
7202func (s DeletePresetOutput) String() string {
7203	return awsutil.Prettify(s)
7204}
7205
7206// GoString returns the string representation
7207func (s DeletePresetOutput) GoString() string {
7208	return s.String()
7209}
7210
7211// Delete a queue by sending a request with the queue name. You can't delete
7212// a queue with an active pricing plan or one that has unprocessed jobs in it.
7213type DeleteQueueInput struct {
7214	_ struct{} `type:"structure"`
7215
7216	// The name of the queue that you want to delete.
7217	//
7218	// Name is a required field
7219	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
7220}
7221
7222// String returns the string representation
7223func (s DeleteQueueInput) String() string {
7224	return awsutil.Prettify(s)
7225}
7226
7227// GoString returns the string representation
7228func (s DeleteQueueInput) GoString() string {
7229	return s.String()
7230}
7231
7232// Validate inspects the fields of the type to determine if they are valid.
7233func (s *DeleteQueueInput) Validate() error {
7234	invalidParams := request.ErrInvalidParams{Context: "DeleteQueueInput"}
7235	if s.Name == nil {
7236		invalidParams.Add(request.NewErrParamRequired("Name"))
7237	}
7238	if s.Name != nil && len(*s.Name) < 1 {
7239		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
7240	}
7241
7242	if invalidParams.Len() > 0 {
7243		return invalidParams
7244	}
7245	return nil
7246}
7247
7248// SetName sets the Name field's value.
7249func (s *DeleteQueueInput) SetName(v string) *DeleteQueueInput {
7250	s.Name = &v
7251	return s
7252}
7253
7254// Delete queue requests return an OK message or error message with an empty
7255// body.
7256type DeleteQueueOutput struct {
7257	_ struct{} `type:"structure"`
7258}
7259
7260// String returns the string representation
7261func (s DeleteQueueOutput) String() string {
7262	return awsutil.Prettify(s)
7263}
7264
7265// GoString returns the string representation
7266func (s DeleteQueueOutput) GoString() string {
7267	return s.String()
7268}
7269
7270	// Send a request with an empty body to the regional API endpoint to get your
7271// account API endpoint.
7272type DescribeEndpointsInput struct {
7273	_ struct{} `type:"structure"`
7274
7275	// Optional. Max number of endpoints, up to twenty, that will be returned at
7276	// one time.
7277	MaxResults *int64 `locationName:"maxResults" type:"integer"`
7278
7279	// Optional field, defaults to DEFAULT. Specify DEFAULT for this operation to
7280	// return your endpoints if any exist, or to create an endpoint for you and
7281	// return it if one doesn't already exist. Specify GET_ONLY to return your endpoints
7282	// if any exist, or an empty list if none exist.
7283	Mode *string `locationName:"mode" type:"string" enum:"DescribeEndpointsMode"`
7284
7285	// Use this string, provided with the response to a previous request, to request
7286	// the next batch of endpoints.
7287	NextToken *string `locationName:"nextToken" type:"string"`
7288}
7289
7290// String returns the string representation
7291func (s DescribeEndpointsInput) String() string {
7292	return awsutil.Prettify(s)
7293}
7294
7295// GoString returns the string representation
7296func (s DescribeEndpointsInput) GoString() string {
7297	return s.String()
7298}
7299
7300// SetMaxResults sets the MaxResults field's value.
7301func (s *DescribeEndpointsInput) SetMaxResults(v int64) *DescribeEndpointsInput {
7302	s.MaxResults = &v
7303	return s
7304}
7305
7306// SetMode sets the Mode field's value.
7307func (s *DescribeEndpointsInput) SetMode(v string) *DescribeEndpointsInput {
7308	s.Mode = &v
7309	return s
7310}
7311
7312// SetNextToken sets the NextToken field's value.
7313func (s *DescribeEndpointsInput) SetNextToken(v string) *DescribeEndpointsInput {
7314	s.NextToken = &v
7315	return s
7316}
7317
7318// Successful describe endpoints requests will return your account API endpoint.
7319type DescribeEndpointsOutput struct {
7320	_ struct{} `type:"structure"`
7321
7322	// List of endpoints
7323	Endpoints []*Endpoint `locationName:"endpoints" type:"list"`
7324
7325	// Use this string to request the next batch of endpoints.
7326	NextToken *string `locationName:"nextToken" type:"string"`
7327}
7328
7329// String returns the string representation
7330func (s DescribeEndpointsOutput) String() string {
7331	return awsutil.Prettify(s)
7332}
7333
7334// GoString returns the string representation
7335func (s DescribeEndpointsOutput) GoString() string {
7336	return s.String()
7337}
7338
7339// SetEndpoints sets the Endpoints field's value.
7340func (s *DescribeEndpointsOutput) SetEndpoints(v []*Endpoint) *DescribeEndpointsOutput {
7341	s.Endpoints = v
7342	return s
7343}
7344
7345// SetNextToken sets the NextToken field's value.
7346func (s *DescribeEndpointsOutput) SetNextToken(v string) *DescribeEndpointsOutput {
7347	s.NextToken = &v
7348	return s
7349}
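
// Example (illustrative, not generated): paging through account endpoints with
// NextToken, assuming svc is a configured *MediaConvert client and using the
// DescribeEndpoints operation and Endpoint type defined elsewhere in this
// package.
//
//    input := (&DescribeEndpointsInput{}).SetMaxResults(20).SetMode("GET_ONLY")
//    for {
//        out, err := svc.DescribeEndpoints(input)
//        if err != nil {
//            fmt.Println("DescribeEndpoints failed:", err)
//            break
//        }
//        for _, ep := range out.Endpoints {
//            fmt.Println(aws.StringValue(ep.Url)) // Url is assumed from the Endpoint type
//        }
//        if out.NextToken == nil {
//            break
//        }
//        input.SetNextToken(aws.StringValue(out.NextToken))
//    }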
7350
7351// Settings associated with the destination. Will vary based on the type of
7352// destination
7353type DestinationSettings struct {
7354	_ struct{} `type:"structure"`
7355
7356	// Settings associated with S3 destination
7357	S3Settings *S3DestinationSettings `locationName:"s3Settings" type:"structure"`
7358}
7359
7360// String returns the string representation
7361func (s DestinationSettings) String() string {
7362	return awsutil.Prettify(s)
7363}
7364
7365// GoString returns the string representation
7366func (s DestinationSettings) GoString() string {
7367	return s.String()
7368}
7369
7370// SetS3Settings sets the S3Settings field's value.
7371func (s *DestinationSettings) SetS3Settings(v *S3DestinationSettings) *DestinationSettings {
7372	s.S3Settings = v
7373	return s
7374}
7375
7376// Removes an association between the Amazon Resource Name (ARN) of an AWS Certificate
7377// Manager (ACM) certificate and an AWS Elemental MediaConvert resource.
7378type DisassociateCertificateInput struct {
7379	_ struct{} `type:"structure"`
7380
7381	// The ARN of the ACM certificate that you want to disassociate from your MediaConvert
7382	// resource.
7383	//
7384	// Arn is a required field
7385	Arn *string `location:"uri" locationName:"arn" type:"string" required:"true"`
7386}
7387
7388// String returns the string representation
7389func (s DisassociateCertificateInput) String() string {
7390	return awsutil.Prettify(s)
7391}
7392
7393// GoString returns the string representation
7394func (s DisassociateCertificateInput) GoString() string {
7395	return s.String()
7396}
7397
7398// Validate inspects the fields of the type to determine if they are valid.
7399func (s *DisassociateCertificateInput) Validate() error {
7400	invalidParams := request.ErrInvalidParams{Context: "DisassociateCertificateInput"}
7401	if s.Arn == nil {
7402		invalidParams.Add(request.NewErrParamRequired("Arn"))
7403	}
7404	if s.Arn != nil && len(*s.Arn) < 1 {
7405		invalidParams.Add(request.NewErrParamMinLen("Arn", 1))
7406	}
7407
7408	if invalidParams.Len() > 0 {
7409		return invalidParams
7410	}
7411	return nil
7412}
7413
7414// SetArn sets the Arn field's value.
7415func (s *DisassociateCertificateInput) SetArn(v string) *DisassociateCertificateInput {
7416	s.Arn = &v
7417	return s
7418}
7419
7420// Successful disassociation of Certificate Manager Amazon Resource Name (ARN)
7421	// from MediaConvert returns an OK message.
7422type DisassociateCertificateOutput struct {
7423	_ struct{} `type:"structure"`
7424}
7425
7426// String returns the string representation
7427func (s DisassociateCertificateOutput) String() string {
7428	return awsutil.Prettify(s)
7429}
7430
7431// GoString returns the string representation
7432func (s DisassociateCertificateOutput) GoString() string {
7433	return s.String()
7434}
7435
7436// Settings for Dolby Vision
7437type DolbyVision struct {
7438	_ struct{} `type:"structure"`
7439
7440	// Use these settings when you set DolbyVisionLevel6Mode to SPECIFY to override
7441	// the MaxCLL and MaxFALL values in your input with new values.
7442	L6Metadata *DolbyVisionLevel6Metadata `locationName:"l6Metadata" type:"structure"`
7443
7444	// Use Dolby Vision Mode to choose how the service will handle Dolby Vision
7445	// MaxCLL and MaxFALL properties.
7446	L6Mode *string `locationName:"l6Mode" type:"string" enum:"DolbyVisionLevel6Mode"`
7447
7448	// In the current MediaConvert implementation, the Dolby Vision profile is always
7449	// 5 (PROFILE_5). Therefore, all of your inputs must contain Dolby Vision frame
7450	// interleaved data.
7451	Profile *string `locationName:"profile" type:"string" enum:"DolbyVisionProfile"`
7452}
7453
7454// String returns the string representation
7455func (s DolbyVision) String() string {
7456	return awsutil.Prettify(s)
7457}
7458
7459// GoString returns the string representation
7460func (s DolbyVision) GoString() string {
7461	return s.String()
7462}
7463
7464// SetL6Metadata sets the L6Metadata field's value.
7465func (s *DolbyVision) SetL6Metadata(v *DolbyVisionLevel6Metadata) *DolbyVision {
7466	s.L6Metadata = v
7467	return s
7468}
7469
7470// SetL6Mode sets the L6Mode field's value.
7471func (s *DolbyVision) SetL6Mode(v string) *DolbyVision {
7472	s.L6Mode = &v
7473	return s
7474}
7475
7476// SetProfile sets the Profile field's value.
7477func (s *DolbyVision) SetProfile(v string) *DolbyVision {
7478	s.Profile = &v
7479	return s
7480}
7481
7482// Use these settings when you set DolbyVisionLevel6Mode to SPECIFY to override
7483// the MaxCLL and MaxFALL values in your input with new values.
7484type DolbyVisionLevel6Metadata struct {
7485	_ struct{} `type:"structure"`
7486
7487	// Maximum Content Light Level. Static HDR metadata that corresponds to the
7488	// brightest pixel in the entire stream. Measured in nits.
7489	MaxCll *int64 `locationName:"maxCll" type:"integer"`
7490
7491	// Maximum Frame-Average Light Level. Static HDR metadata that corresponds to
7492	// the highest frame-average brightness in the entire stream. Measured in nits.
7493	MaxFall *int64 `locationName:"maxFall" type:"integer"`
7494}
7495
7496// String returns the string representation
7497func (s DolbyVisionLevel6Metadata) String() string {
7498	return awsutil.Prettify(s)
7499}
7500
7501// GoString returns the string representation
7502func (s DolbyVisionLevel6Metadata) GoString() string {
7503	return s.String()
7504}
7505
7506// SetMaxCll sets the MaxCll field's value.
7507func (s *DolbyVisionLevel6Metadata) SetMaxCll(v int64) *DolbyVisionLevel6Metadata {
7508	s.MaxCll = &v
7509	return s
7510}
7511
7512// SetMaxFall sets the MaxFall field's value.
7513func (s *DolbyVisionLevel6Metadata) SetMaxFall(v int64) *DolbyVisionLevel6Metadata {
7514	s.MaxFall = &v
7515	return s
7516}
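
// Example (illustrative, not generated): overriding the input's MaxCLL and
// MaxFALL values. As documented above, L6Metadata takes effect when L6Mode is
// set to SPECIFY; the nit values shown are placeholders.
//
//    dv := (&DolbyVision{}).
//        SetProfile("PROFILE_5").
//        SetL6Mode("SPECIFY").
//        SetL6Metadata((&DolbyVisionLevel6Metadata{}).SetMaxCll(1000).SetMaxFall(400))
//    _ = dv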
7517
7518// Inserts DVB Network Information Table (NIT) at the specified table repetition
7519// interval.
7520type DvbNitSettings struct {
7521	_ struct{} `type:"structure"`
7522
7523	// The numeric value placed in the Network Information Table (NIT).
7524	NetworkId *int64 `locationName:"networkId" type:"integer"`
7525
7526	// The network name text placed in the network_name_descriptor inside the Network
7527	// Information Table. Maximum length is 256 characters.
7528	NetworkName *string `locationName:"networkName" min:"1" type:"string"`
7529
7530	// The number of milliseconds between instances of this table in the output
7531	// transport stream.
7532	NitInterval *int64 `locationName:"nitInterval" min:"25" type:"integer"`
7533}
7534
7535// String returns the string representation
7536func (s DvbNitSettings) String() string {
7537	return awsutil.Prettify(s)
7538}
7539
7540// GoString returns the string representation
7541func (s DvbNitSettings) GoString() string {
7542	return s.String()
7543}
7544
7545// Validate inspects the fields of the type to determine if they are valid.
7546func (s *DvbNitSettings) Validate() error {
7547	invalidParams := request.ErrInvalidParams{Context: "DvbNitSettings"}
7548	if s.NetworkName != nil && len(*s.NetworkName) < 1 {
7549		invalidParams.Add(request.NewErrParamMinLen("NetworkName", 1))
7550	}
7551	if s.NitInterval != nil && *s.NitInterval < 25 {
7552		invalidParams.Add(request.NewErrParamMinValue("NitInterval", 25))
7553	}
7554
7555	if invalidParams.Len() > 0 {
7556		return invalidParams
7557	}
7558	return nil
7559}
7560
7561// SetNetworkId sets the NetworkId field's value.
7562func (s *DvbNitSettings) SetNetworkId(v int64) *DvbNitSettings {
7563	s.NetworkId = &v
7564	return s
7565}
7566
7567// SetNetworkName sets the NetworkName field's value.
7568func (s *DvbNitSettings) SetNetworkName(v string) *DvbNitSettings {
7569	s.NetworkName = &v
7570	return s
7571}
7572
7573// SetNitInterval sets the NitInterval field's value.
7574func (s *DvbNitSettings) SetNitInterval(v int64) *DvbNitSettings {
7575	s.NitInterval = &v
7576	return s
7577}
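
// Example (illustrative, not generated): NIT settings with placeholder network
// values. Validate rejects a NitInterval below the documented minimum of 25
// milliseconds.
//
//    nit := (&DvbNitSettings{}).
//        SetNetworkId(65535).
//        SetNetworkName("Example Network").
//        SetNitInterval(480)
//    if err := nit.Validate(); err != nil {
//        fmt.Println("invalid NIT settings:", err)
//    }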
7578
7579	// Inserts DVB Service Description Table (SDT) at the specified table repetition
7580// interval.
7581type DvbSdtSettings struct {
7582	_ struct{} `type:"structure"`
7583
7584	// Selects the method of inserting SDT information into the output stream. "Follow
7585	// input SDT" copies SDT information from the input stream to the output stream.
7586	// "Follow input SDT if present" copies it only if SDT information is present in
7587	// the input; otherwise it falls back on the user-defined values. "SDT Manually"
7588	// means that the user enters the SDT information. "No SDT" means the output
7589	// stream will not contain SDT information.
7590	OutputSdt *string `locationName:"outputSdt" type:"string" enum:"OutputSdt"`
7591
7592	// The number of milliseconds between instances of this table in the output
7593	// transport stream.
7594	SdtInterval *int64 `locationName:"sdtInterval" min:"25" type:"integer"`
7595
7596	// The service name placed in the service_descriptor in the Service Description
7597	// Table. Maximum length is 256 characters.
7598	ServiceName *string `locationName:"serviceName" min:"1" type:"string"`
7599
7600	// The service provider name placed in the service_descriptor in the Service
7601	// Description Table. Maximum length is 256 characters.
7602	ServiceProviderName *string `locationName:"serviceProviderName" min:"1" type:"string"`
7603}
7604
7605// String returns the string representation
7606func (s DvbSdtSettings) String() string {
7607	return awsutil.Prettify(s)
7608}
7609
7610// GoString returns the string representation
7611func (s DvbSdtSettings) GoString() string {
7612	return s.String()
7613}
7614
7615// Validate inspects the fields of the type to determine if they are valid.
7616func (s *DvbSdtSettings) Validate() error {
7617	invalidParams := request.ErrInvalidParams{Context: "DvbSdtSettings"}
7618	if s.SdtInterval != nil && *s.SdtInterval < 25 {
7619		invalidParams.Add(request.NewErrParamMinValue("SdtInterval", 25))
7620	}
7621	if s.ServiceName != nil && len(*s.ServiceName) < 1 {
7622		invalidParams.Add(request.NewErrParamMinLen("ServiceName", 1))
7623	}
7624	if s.ServiceProviderName != nil && len(*s.ServiceProviderName) < 1 {
7625		invalidParams.Add(request.NewErrParamMinLen("ServiceProviderName", 1))
7626	}
7627
7628	if invalidParams.Len() > 0 {
7629		return invalidParams
7630	}
7631	return nil
7632}
7633
7634// SetOutputSdt sets the OutputSdt field's value.
7635func (s *DvbSdtSettings) SetOutputSdt(v string) *DvbSdtSettings {
7636	s.OutputSdt = &v
7637	return s
7638}
7639
7640// SetSdtInterval sets the SdtInterval field's value.
7641func (s *DvbSdtSettings) SetSdtInterval(v int64) *DvbSdtSettings {
7642	s.SdtInterval = &v
7643	return s
7644}
7645
7646// SetServiceName sets the ServiceName field's value.
7647func (s *DvbSdtSettings) SetServiceName(v string) *DvbSdtSettings {
7648	s.ServiceName = &v
7649	return s
7650}
7651
7652// SetServiceProviderName sets the ServiceProviderName field's value.
7653func (s *DvbSdtSettings) SetServiceProviderName(v string) *DvbSdtSettings {
7654	s.ServiceProviderName = &v
7655	return s
7656}
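
// Example (illustrative, not generated): SDT settings that follow the input SDT
// when present and fall back to user-defined names otherwise. The enum string
// SDT_FOLLOW_IF_PRESENT and the service names are assumptions for illustration.
//
//    sdt := (&DvbSdtSettings{}).
//        SetOutputSdt("SDT_FOLLOW_IF_PRESENT").
//        SetSdtInterval(500).
//        SetServiceName("Example Service").
//        SetServiceProviderName("Example Provider")
//    if err := sdt.Validate(); err != nil {
//        fmt.Println("invalid SDT settings:", err)
//    }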
7657
7658// DVB-Sub Destination Settings
7659type DvbSubDestinationSettings struct {
7660	_ struct{} `type:"structure"`
7661
7662	// If no explicit x_position or y_position is provided, setting alignment to
7663	// centered will place the captions at the bottom center of the output. Similarly,
7664	// setting a left alignment will align captions to the bottom left of the output.
7665	// If x and y positions are given in conjunction with the alignment parameter,
7666	// the font will be justified (either left or centered) relative to those coordinates.
7667	// This option is not valid for source captions that are STL, 608/embedded or
7668	// teletext. These source settings are already pre-defined by the caption stream.
7669	// All burn-in and DVB-Sub font settings must match.
7670	Alignment *string `locationName:"alignment" type:"string" enum:"DvbSubtitleAlignment"`
7671
7672	// Specifies the color of the rectangle behind the captions. All burn-in and
7673	// DVB-Sub font settings must match.
7674	BackgroundColor *string `locationName:"backgroundColor" type:"string" enum:"DvbSubtitleBackgroundColor"`
7675
7676	// Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent.
7677	// Leaving this parameter blank is equivalent to setting it to 0 (transparent).
7678	// All burn-in and DVB-Sub font settings must match.
7679	BackgroundOpacity *int64 `locationName:"backgroundOpacity" type:"integer"`
7680
7681	// Specifies the color of the burned-in captions. This option is not valid for
7682	// source captions that are STL, 608/embedded or teletext. These source settings
7683	// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
7684	// settings must match.
7685	FontColor *string `locationName:"fontColor" type:"string" enum:"DvbSubtitleFontColor"`
7686
7687	// Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All
7688	// burn-in and DVB-Sub font settings must match.
7689	FontOpacity *int64 `locationName:"fontOpacity" type:"integer"`
7690
7691	// Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and
7692	// DVB-Sub font settings must match.
7693	FontResolution *int64 `locationName:"fontResolution" min:"96" type:"integer"`
7694
7695	// Provide the font script, using an ISO 15924 script code, if the LanguageCode
7696	// is not sufficient for determining the script type. Where LanguageCode or
7697	// CustomLanguageCode is sufficient, use "AUTOMATIC" or leave unset. This is
7698	// used to help determine the appropriate font for rendering DVB-Sub captions.
7699	FontScript *string `locationName:"fontScript" type:"string" enum:"FontScript"`
7700
7701	// A positive integer indicates the exact font size in points. Set to 0 for
7702	// automatic font size selection. All burn-in and DVB-Sub font settings must
7703	// match.
7704	FontSize *int64 `locationName:"fontSize" type:"integer"`
7705
7706	// Specifies font outline color. This option is not valid for source captions
7707	// that are either 608/embedded or teletext. These source settings are already
7708	// pre-defined by the caption stream. All burn-in and DVB-Sub font settings
7709	// must match.
7710	OutlineColor *string `locationName:"outlineColor" type:"string" enum:"DvbSubtitleOutlineColor"`
7711
7712	// Specifies font outline size in pixels. This option is not valid for source
7713	// captions that are either 608/embedded or teletext. These source settings
7714	// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
7715	// settings must match.
7716	OutlineSize *int64 `locationName:"outlineSize" type:"integer"`
7717
7718	// Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub
7719	// font settings must match.
7720	ShadowColor *string `locationName:"shadowColor" type:"string" enum:"DvbSubtitleShadowColor"`
7721
7722	// Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving
7723	// this parameter blank is equivalent to setting it to 0 (transparent). All
7724	// burn-in and DVB-Sub font settings must match.
7725	ShadowOpacity *int64 `locationName:"shadowOpacity" type:"integer"`
7726
7727	// Specifies the horizontal offset of the shadow relative to the captions in
7728	// pixels. A value of -2 would result in a shadow offset 2 pixels to the left.
7729	// All burn-in and DVB-Sub font settings must match.
7730	ShadowXOffset *int64 `locationName:"shadowXOffset" type:"integer"`
7731
7732	// Specifies the vertical offset of the shadow relative to the captions in pixels.
7733	// A value of -2 would result in a shadow offset 2 pixels above the text. All
7734	// burn-in and DVB-Sub font settings must match.
7735	ShadowYOffset *int64 `locationName:"shadowYOffset" type:"integer"`
7736
7737	// Specify whether your DVB subtitles are standard or for hearing impaired.
7738	// Choose hearing impaired if your subtitles include audio descriptions and
7739	// dialogue. Choose standard if your subtitles include only dialogue.
7740	SubtitlingType *string `locationName:"subtitlingType" type:"string" enum:"DvbSubtitlingType"`
7741
7742	// Only applies to jobs with input captions in Teletext or STL formats. Specify
7743	// whether the spacing between letters in your captions is set by the captions
7744	// grid or varies depending on letter width. Choose fixed grid to conform to
7745	// the spacing specified in the captions file more accurately. Choose proportional
7746	// to make the text easier to read if the captions are closed captions.
7747	TeletextSpacing *string `locationName:"teletextSpacing" type:"string" enum:"DvbSubtitleTeletextSpacing"`
7748
7749	// Specifies the horizontal position of the caption relative to the left side
7750	// of the output in pixels. A value of 10 would result in the captions starting
7751	// 10 pixels from the left of the output. If no explicit x_position is provided,
7752	// the horizontal caption position will be determined by the alignment parameter.
7753	// This option is not valid for source captions that are STL, 608/embedded or
7754	// teletext. These source settings are already pre-defined by the caption stream.
7755	// All burn-in and DVB-Sub font settings must match.
7756	XPosition *int64 `locationName:"xPosition" type:"integer"`
7757
7758	// Specifies the vertical position of the caption relative to the top of the
7759	// output in pixels. A value of 10 would result in the captions starting 10
7760	// pixels from the top of the output. If no explicit y_position is provided,
7761	// the caption will be positioned towards the bottom of the output. This option
7762	// is not valid for source captions that are STL, 608/embedded or teletext.
7763	// These source settings are already pre-defined by the caption stream. All
7764	// burn-in and DVB-Sub font settings must match.
7765	YPosition *int64 `locationName:"yPosition" type:"integer"`
7766}
7767
7768// String returns the string representation
7769func (s DvbSubDestinationSettings) String() string {
7770	return awsutil.Prettify(s)
7771}
7772
7773// GoString returns the string representation
7774func (s DvbSubDestinationSettings) GoString() string {
7775	return s.String()
7776}
7777
7778// Validate inspects the fields of the type to determine if they are valid.
7779func (s *DvbSubDestinationSettings) Validate() error {
7780	invalidParams := request.ErrInvalidParams{Context: "DvbSubDestinationSettings"}
7781	if s.FontResolution != nil && *s.FontResolution < 96 {
7782		invalidParams.Add(request.NewErrParamMinValue("FontResolution", 96))
7783	}
7784	if s.ShadowXOffset != nil && *s.ShadowXOffset < -2.147483648e+09 {
7785		invalidParams.Add(request.NewErrParamMinValue("ShadowXOffset", -2.147483648e+09))
7786	}
7787	if s.ShadowYOffset != nil && *s.ShadowYOffset < -2.147483648e+09 {
7788		invalidParams.Add(request.NewErrParamMinValue("ShadowYOffset", -2.147483648e+09))
7789	}
7790
7791	if invalidParams.Len() > 0 {
7792		return invalidParams
7793	}
7794	return nil
7795}
7796
7797// SetAlignment sets the Alignment field's value.
7798func (s *DvbSubDestinationSettings) SetAlignment(v string) *DvbSubDestinationSettings {
7799	s.Alignment = &v
7800	return s
7801}
7802
7803// SetBackgroundColor sets the BackgroundColor field's value.
7804func (s *DvbSubDestinationSettings) SetBackgroundColor(v string) *DvbSubDestinationSettings {
7805	s.BackgroundColor = &v
7806	return s
7807}
7808
7809// SetBackgroundOpacity sets the BackgroundOpacity field's value.
7810func (s *DvbSubDestinationSettings) SetBackgroundOpacity(v int64) *DvbSubDestinationSettings {
7811	s.BackgroundOpacity = &v
7812	return s
7813}
7814
7815// SetFontColor sets the FontColor field's value.
7816func (s *DvbSubDestinationSettings) SetFontColor(v string) *DvbSubDestinationSettings {
7817	s.FontColor = &v
7818	return s
7819}
7820
7821// SetFontOpacity sets the FontOpacity field's value.
7822func (s *DvbSubDestinationSettings) SetFontOpacity(v int64) *DvbSubDestinationSettings {
7823	s.FontOpacity = &v
7824	return s
7825}
7826
7827// SetFontResolution sets the FontResolution field's value.
7828func (s *DvbSubDestinationSettings) SetFontResolution(v int64) *DvbSubDestinationSettings {
7829	s.FontResolution = &v
7830	return s
7831}
7832
7833// SetFontScript sets the FontScript field's value.
7834func (s *DvbSubDestinationSettings) SetFontScript(v string) *DvbSubDestinationSettings {
7835	s.FontScript = &v
7836	return s
7837}
7838
7839// SetFontSize sets the FontSize field's value.
7840func (s *DvbSubDestinationSettings) SetFontSize(v int64) *DvbSubDestinationSettings {
7841	s.FontSize = &v
7842	return s
7843}
7844
7845// SetOutlineColor sets the OutlineColor field's value.
7846func (s *DvbSubDestinationSettings) SetOutlineColor(v string) *DvbSubDestinationSettings {
7847	s.OutlineColor = &v
7848	return s
7849}
7850
7851// SetOutlineSize sets the OutlineSize field's value.
7852func (s *DvbSubDestinationSettings) SetOutlineSize(v int64) *DvbSubDestinationSettings {
7853	s.OutlineSize = &v
7854	return s
7855}
7856
7857// SetShadowColor sets the ShadowColor field's value.
7858func (s *DvbSubDestinationSettings) SetShadowColor(v string) *DvbSubDestinationSettings {
7859	s.ShadowColor = &v
7860	return s
7861}
7862
7863// SetShadowOpacity sets the ShadowOpacity field's value.
7864func (s *DvbSubDestinationSettings) SetShadowOpacity(v int64) *DvbSubDestinationSettings {
7865	s.ShadowOpacity = &v
7866	return s
7867}
7868
7869// SetShadowXOffset sets the ShadowXOffset field's value.
7870func (s *DvbSubDestinationSettings) SetShadowXOffset(v int64) *DvbSubDestinationSettings {
7871	s.ShadowXOffset = &v
7872	return s
7873}
7874
7875// SetShadowYOffset sets the ShadowYOffset field's value.
7876func (s *DvbSubDestinationSettings) SetShadowYOffset(v int64) *DvbSubDestinationSettings {
7877	s.ShadowYOffset = &v
7878	return s
7879}
7880
7881// SetSubtitlingType sets the SubtitlingType field's value.
7882func (s *DvbSubDestinationSettings) SetSubtitlingType(v string) *DvbSubDestinationSettings {
7883	s.SubtitlingType = &v
7884	return s
7885}
7886
7887// SetTeletextSpacing sets the TeletextSpacing field's value.
7888func (s *DvbSubDestinationSettings) SetTeletextSpacing(v string) *DvbSubDestinationSettings {
7889	s.TeletextSpacing = &v
7890	return s
7891}
7892
7893// SetXPosition sets the XPosition field's value.
7894func (s *DvbSubDestinationSettings) SetXPosition(v int64) *DvbSubDestinationSettings {
7895	s.XPosition = &v
7896	return s
7897}
7898
7899// SetYPosition sets the YPosition field's value.
7900func (s *DvbSubDestinationSettings) SetYPosition(v int64) *DvbSubDestinationSettings {
7901	s.YPosition = &v
7902	return s
7903}
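
// Example (illustrative, not generated): DVB-Sub styling with an explicit
// position. The enum strings (CENTERED, WHITE, BLACK) are assumptions based on
// the option names documented above; remember that all burn-in and DVB-Sub
// font settings must match.
//
//    dvbSub := (&DvbSubDestinationSettings{}).
//        SetAlignment("CENTERED").
//        SetFontColor("WHITE").
//        SetFontOpacity(255).
//        SetFontResolution(96).
//        SetBackgroundColor("BLACK").
//        SetBackgroundOpacity(128).
//        SetXPosition(100).
//        SetYPosition(600)
//    if err := dvbSub.Validate(); err != nil {
//        fmt.Println("invalid DVB-Sub settings:", err)
//    }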
7904
7905// DVB Sub Source Settings
7906type DvbSubSourceSettings struct {
7907	_ struct{} `type:"structure"`
7908
7909	// When using DVB-Sub with Burn-In or SMPTE-TT, use this PID for the source
7910	// content. Unused for DVB-Sub passthrough. All DVB-Sub content is passed through,
7911	// regardless of selectors.
7912	Pid *int64 `locationName:"pid" min:"1" type:"integer"`
7913}
7914
7915// String returns the string representation
7916func (s DvbSubSourceSettings) String() string {
7917	return awsutil.Prettify(s)
7918}
7919
7920// GoString returns the string representation
7921func (s DvbSubSourceSettings) GoString() string {
7922	return s.String()
7923}
7924
7925// Validate inspects the fields of the type to determine if they are valid.
7926func (s *DvbSubSourceSettings) Validate() error {
7927	invalidParams := request.ErrInvalidParams{Context: "DvbSubSourceSettings"}
7928	if s.Pid != nil && *s.Pid < 1 {
7929		invalidParams.Add(request.NewErrParamMinValue("Pid", 1))
7930	}
7931
7932	if invalidParams.Len() > 0 {
7933		return invalidParams
7934	}
7935	return nil
7936}
7937
7938// SetPid sets the Pid field's value.
7939func (s *DvbSubSourceSettings) SetPid(v int64) *DvbSubSourceSettings {
7940	s.Pid = &v
7941	return s
7942}
7943
7944// Inserts DVB Time and Date Table (TDT) at the specified table repetition interval.
7945type DvbTdtSettings struct {
7946	_ struct{} `type:"structure"`
7947
7948	// The number of milliseconds between instances of this table in the output
7949	// transport stream.
7950	TdtInterval *int64 `locationName:"tdtInterval" min:"1000" type:"integer"`
7951}
7952
7953// String returns the string representation
7954func (s DvbTdtSettings) String() string {
7955	return awsutil.Prettify(s)
7956}
7957
7958// GoString returns the string representation
7959func (s DvbTdtSettings) GoString() string {
7960	return s.String()
7961}
7962
7963// Validate inspects the fields of the type to determine if they are valid.
7964func (s *DvbTdtSettings) Validate() error {
7965	invalidParams := request.ErrInvalidParams{Context: "DvbTdtSettings"}
7966	if s.TdtInterval != nil && *s.TdtInterval < 1000 {
7967		invalidParams.Add(request.NewErrParamMinValue("TdtInterval", 1000))
7968	}
7969
7970	if invalidParams.Len() > 0 {
7971		return invalidParams
7972	}
7973	return nil
7974}
7975
7976// SetTdtInterval sets the TdtInterval field's value.
7977func (s *DvbTdtSettings) SetTdtInterval(v int64) *DvbTdtSettings {
7978	s.TdtInterval = &v
7979	return s
7980}
7981
7982// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
7983// the value EAC3_ATMOS.
7984type Eac3AtmosSettings struct {
7985	_ struct{} `type:"structure"`
7986
7987	// Specify the average bitrate in bits per second. Valid values: 384k, 448k,
7988	// 640k, 768k.
7989	Bitrate *int64 `locationName:"bitrate" min:"384000" type:"integer"`
7990
7991	// Specify the bitstream mode for the E-AC-3 stream that the encoder emits.
7992	// For more information about the EAC3 bitstream mode, see ATSC A/52-2012 (Annex
7993	// E).
7994	BitstreamMode *string `locationName:"bitstreamMode" type:"string" enum:"Eac3AtmosBitstreamMode"`
7995
7996	// The coding mode for Dolby Digital Plus JOC (Atmos) is always 9.1.6 (CODING_MODE_9_1_6).
7997	CodingMode *string `locationName:"codingMode" type:"string" enum:"Eac3AtmosCodingMode"`
7998
7999	// Enable Dolby Dialogue Intelligence to adjust loudness based on dialogue analysis.
8000	DialogueIntelligence *string `locationName:"dialogueIntelligence" type:"string" enum:"Eac3AtmosDialogueIntelligence"`
8001
8002	// Specify the absolute peak level for a signal with dynamic range compression.
8003	DynamicRangeCompressionLine *string `locationName:"dynamicRangeCompressionLine" type:"string" enum:"Eac3AtmosDynamicRangeCompressionLine"`
8004
8005	// Specify how the service limits the audio dynamic range when compressing the
8006	// audio.
8007	DynamicRangeCompressionRf *string `locationName:"dynamicRangeCompressionRf" type:"string" enum:"Eac3AtmosDynamicRangeCompressionRf"`
8008
8009	// Specify a value for the following Dolby Atmos setting: Left only/Right only
8010	// center mix (Lo/Ro center). MediaConvert uses this value for downmixing. How
8011	// the service uses this value depends on the value that you choose for Stereo
8012	// downmix (Eac3AtmosStereoDownmix). Valid values: 3.0, 1.5, 0.0, -1.5, -3.0,
8013	// -4.5, and -6.0.
8014	LoRoCenterMixLevel *float64 `locationName:"loRoCenterMixLevel" type:"double"`
8015
8016	// Specify a value for the following Dolby Atmos setting: Left only/Right only
8017	// (Lo/Ro surround). MediaConvert uses this value for downmixing. How the service
8018	// uses this value depends on the value that you choose for Stereo downmix (Eac3AtmosStereoDownmix).
8019	// Valid values: -1.5, -3.0, -4.5, -6.0, and -60. The value -60 mutes the channel.
8020	LoRoSurroundMixLevel *float64 `locationName:"loRoSurroundMixLevel" type:"double"`
8021
8022	// Specify a value for the following Dolby Atmos setting: Left total/Right total
8023	// center mix (Lt/Rt center). MediaConvert uses this value for downmixing. How
8024	// the service uses this value depends on the value that you choose for Stereo
8025	// downmix (Eac3AtmosStereoDownmix). Valid values: 3.0, 1.5, 0.0, -1.5, -3.0,
8026	// -4.5, and -6.0.
8027	LtRtCenterMixLevel *float64 `locationName:"ltRtCenterMixLevel" type:"double"`
8028
8029	// Specify a value for the following Dolby Atmos setting: Left total/Right total
8030	// surround mix (Lt/Rt surround). MediaConvert uses this value for downmixing.
8031	// How the service uses this value depends on the value that you choose for
8032	// Stereo downmix (Eac3AtmosStereoDownmix). Valid values: -1.5, -3.0, -4.5,
8033	// -6.0, and -60. The value -60 mutes the channel.
8034	LtRtSurroundMixLevel *float64 `locationName:"ltRtSurroundMixLevel" type:"double"`
8035
8036	// Choose how the service meters the loudness of your audio.
8037	MeteringMode *string `locationName:"meteringMode" type:"string" enum:"Eac3AtmosMeteringMode"`
8038
8039	// This value is always 48000. It represents the sample rate in Hz.
8040	SampleRate *int64 `locationName:"sampleRate" min:"48000" type:"integer"`
8041
8042	// Specify the percentage of audio content that must be speech before the encoder
8043	// uses the measured speech loudness as the overall program loudness.
8044	SpeechThreshold *int64 `locationName:"speechThreshold" min:"1" type:"integer"`
8045
8046	// Choose how the service does stereo downmixing.
8047	StereoDownmix *string `locationName:"stereoDownmix" type:"string" enum:"Eac3AtmosStereoDownmix"`
8048
8049	// Specify whether your input audio has an additional center rear surround channel
8050	// matrix encoded into your left and right surround channels.
8051	SurroundExMode *string `locationName:"surroundExMode" type:"string" enum:"Eac3AtmosSurroundExMode"`
8052}
8053
8054// String returns the string representation
8055func (s Eac3AtmosSettings) String() string {
8056	return awsutil.Prettify(s)
8057}
8058
8059// GoString returns the string representation
8060func (s Eac3AtmosSettings) GoString() string {
8061	return s.String()
8062}
8063
8064// Validate inspects the fields of the type to determine if they are valid.
8065func (s *Eac3AtmosSettings) Validate() error {
8066	invalidParams := request.ErrInvalidParams{Context: "Eac3AtmosSettings"}
8067	if s.Bitrate != nil && *s.Bitrate < 384000 {
8068		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 384000))
8069	}
8070	if s.SampleRate != nil && *s.SampleRate < 48000 {
8071		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 48000))
8072	}
8073	if s.SpeechThreshold != nil && *s.SpeechThreshold < 1 {
8074		invalidParams.Add(request.NewErrParamMinValue("SpeechThreshold", 1))
8075	}
8076
8077	if invalidParams.Len() > 0 {
8078		return invalidParams
8079	}
8080	return nil
8081}
8082
8083// SetBitrate sets the Bitrate field's value.
8084func (s *Eac3AtmosSettings) SetBitrate(v int64) *Eac3AtmosSettings {
8085	s.Bitrate = &v
8086	return s
8087}
8088
8089// SetBitstreamMode sets the BitstreamMode field's value.
8090func (s *Eac3AtmosSettings) SetBitstreamMode(v string) *Eac3AtmosSettings {
8091	s.BitstreamMode = &v
8092	return s
8093}
8094
8095// SetCodingMode sets the CodingMode field's value.
8096func (s *Eac3AtmosSettings) SetCodingMode(v string) *Eac3AtmosSettings {
8097	s.CodingMode = &v
8098	return s
8099}
8100
8101// SetDialogueIntelligence sets the DialogueIntelligence field's value.
8102func (s *Eac3AtmosSettings) SetDialogueIntelligence(v string) *Eac3AtmosSettings {
8103	s.DialogueIntelligence = &v
8104	return s
8105}
8106
8107// SetDynamicRangeCompressionLine sets the DynamicRangeCompressionLine field's value.
8108func (s *Eac3AtmosSettings) SetDynamicRangeCompressionLine(v string) *Eac3AtmosSettings {
8109	s.DynamicRangeCompressionLine = &v
8110	return s
8111}
8112
8113// SetDynamicRangeCompressionRf sets the DynamicRangeCompressionRf field's value.
8114func (s *Eac3AtmosSettings) SetDynamicRangeCompressionRf(v string) *Eac3AtmosSettings {
8115	s.DynamicRangeCompressionRf = &v
8116	return s
8117}
8118
8119// SetLoRoCenterMixLevel sets the LoRoCenterMixLevel field's value.
8120func (s *Eac3AtmosSettings) SetLoRoCenterMixLevel(v float64) *Eac3AtmosSettings {
8121	s.LoRoCenterMixLevel = &v
8122	return s
8123}
8124
8125// SetLoRoSurroundMixLevel sets the LoRoSurroundMixLevel field's value.
8126func (s *Eac3AtmosSettings) SetLoRoSurroundMixLevel(v float64) *Eac3AtmosSettings {
8127	s.LoRoSurroundMixLevel = &v
8128	return s
8129}
8130
8131// SetLtRtCenterMixLevel sets the LtRtCenterMixLevel field's value.
8132func (s *Eac3AtmosSettings) SetLtRtCenterMixLevel(v float64) *Eac3AtmosSettings {
8133	s.LtRtCenterMixLevel = &v
8134	return s
8135}
8136
8137// SetLtRtSurroundMixLevel sets the LtRtSurroundMixLevel field's value.
8138func (s *Eac3AtmosSettings) SetLtRtSurroundMixLevel(v float64) *Eac3AtmosSettings {
8139	s.LtRtSurroundMixLevel = &v
8140	return s
8141}
8142
8143// SetMeteringMode sets the MeteringMode field's value.
8144func (s *Eac3AtmosSettings) SetMeteringMode(v string) *Eac3AtmosSettings {
8145	s.MeteringMode = &v
8146	return s
8147}
8148
8149// SetSampleRate sets the SampleRate field's value.
8150func (s *Eac3AtmosSettings) SetSampleRate(v int64) *Eac3AtmosSettings {
8151	s.SampleRate = &v
8152	return s
8153}
8154
8155// SetSpeechThreshold sets the SpeechThreshold field's value.
8156func (s *Eac3AtmosSettings) SetSpeechThreshold(v int64) *Eac3AtmosSettings {
8157	s.SpeechThreshold = &v
8158	return s
8159}
8160
8161// SetStereoDownmix sets the StereoDownmix field's value.
8162func (s *Eac3AtmosSettings) SetStereoDownmix(v string) *Eac3AtmosSettings {
8163	s.StereoDownmix = &v
8164	return s
8165}
8166
8167// SetSurroundExMode sets the SurroundExMode field's value.
8168func (s *Eac3AtmosSettings) SetSurroundExMode(v string) *Eac3AtmosSettings {
8169	s.SurroundExMode = &v
8170	return s
8171}
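
// Example (illustrative, not generated): Dolby Digital Plus JOC (Atmos) audio
// settings using the documented constraints (bitrate of at least 384000 b/s,
// 48000 Hz sample rate, coding mode always 9.1.6). The speech threshold is a
// placeholder percentage.
//
//    atmos := (&Eac3AtmosSettings{}).
//        SetBitrate(448000).
//        SetSampleRate(48000).
//        SetCodingMode("CODING_MODE_9_1_6").
//        SetSpeechThreshold(20)
//    if err := atmos.Validate(); err != nil {
//        fmt.Println("invalid Atmos settings:", err)
//    }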
8172
8173// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
8174// the value EAC3.
8175type Eac3Settings struct {
8176	_ struct{} `type:"structure"`
8177
8178	// If set to ATTENUATE_3_DB, applies a 3 dB attenuation to the surround channels.
8179	// Only used for 3/2 coding mode.
8180	AttenuationControl *string `locationName:"attenuationControl" type:"string" enum:"Eac3AttenuationControl"`
8181
8182	// Specify the average bitrate in bits per second. Valid bitrates depend on
8183	// the coding mode.
8184	Bitrate *int64 `locationName:"bitrate" min:"64000" type:"integer"`
8185
8186	// Specify the bitstream mode for the E-AC-3 stream that the encoder emits.
8187	// For more information about the EAC3 bitstream mode, see ATSC A/52-2012 (Annex
8188	// E).
8189	BitstreamMode *string `locationName:"bitstreamMode" type:"string" enum:"Eac3BitstreamMode"`
8190
8191	// Dolby Digital Plus coding mode. Determines number of channels.
8192	CodingMode *string `locationName:"codingMode" type:"string" enum:"Eac3CodingMode"`
8193
8194	// Activates a DC highpass filter for all input channels.
8195	DcFilter *string `locationName:"dcFilter" type:"string" enum:"Eac3DcFilter"`
8196
8197	// Sets the dialnorm for the output. If blank and input audio is Dolby Digital
8198	// Plus, dialnorm will be passed through.
8199	Dialnorm *int64 `locationName:"dialnorm" min:"1" type:"integer"`
8200
8201	// Specify the absolute peak level for a signal with dynamic range compression.
8202	DynamicRangeCompressionLine *string `locationName:"dynamicRangeCompressionLine" type:"string" enum:"Eac3DynamicRangeCompressionLine"`
8203
8204	// Specify how the service limits the audio dynamic range when compressing the
8205	// audio.
8206	DynamicRangeCompressionRf *string `locationName:"dynamicRangeCompressionRf" type:"string" enum:"Eac3DynamicRangeCompressionRf"`
8207
	// When encoding 3/2 audio, controls whether the LFE channel is enabled.
8209	LfeControl *string `locationName:"lfeControl" type:"string" enum:"Eac3LfeControl"`
8210
8211	// Applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only
8212	// valid with 3_2_LFE coding mode.
8213	LfeFilter *string `locationName:"lfeFilter" type:"string" enum:"Eac3LfeFilter"`
8214
8215	// Specify a value for the following Dolby Digital Plus setting: Left only/Right
8216	// only center mix (Lo/Ro center). MediaConvert uses this value for downmixing.
8217	// How the service uses this value depends on the value that you choose for
8218	// Stereo downmix (Eac3StereoDownmix). Valid values: 3.0, 1.5, 0.0, -1.5, -3.0,
8219	// -4.5, -6.0, and -60. The value -60 mutes the channel. This setting applies
8220	// only if you keep the default value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2)
8221	// for the setting Coding mode (Eac3CodingMode). If you choose a different value
8222	// for Coding mode, the service ignores Left only/Right only center (loRoCenterMixLevel).
8223	LoRoCenterMixLevel *float64 `locationName:"loRoCenterMixLevel" type:"double"`
8224
8225	// Specify a value for the following Dolby Digital Plus setting: Left only/Right
8226	// only (Lo/Ro surround). MediaConvert uses this value for downmixing. How the
8227	// service uses this value depends on the value that you choose for Stereo downmix
8228	// (Eac3StereoDownmix). Valid values: -1.5, -3.0, -4.5, -6.0, and -60. The value
8229	// -60 mutes the channel. This setting applies only if you keep the default
8230	// value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2) for the setting Coding mode
8231	// (Eac3CodingMode). If you choose a different value for Coding mode, the service
8232	// ignores Left only/Right only surround (loRoSurroundMixLevel).
8233	LoRoSurroundMixLevel *float64 `locationName:"loRoSurroundMixLevel" type:"double"`
8234
8235	// Specify a value for the following Dolby Digital Plus setting: Left total/Right
8236	// total center mix (Lt/Rt center). MediaConvert uses this value for downmixing.
8237	// How the service uses this value depends on the value that you choose for
8238	// Stereo downmix (Eac3StereoDownmix). Valid values: 3.0, 1.5, 0.0, -1.5, -3.0,
8239	// -4.5, -6.0, and -60. The value -60 mutes the channel. This setting applies
8240	// only if you keep the default value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2)
8241	// for the setting Coding mode (Eac3CodingMode). If you choose a different value
8242	// for Coding mode, the service ignores Left total/Right total center (ltRtCenterMixLevel).
8243	LtRtCenterMixLevel *float64 `locationName:"ltRtCenterMixLevel" type:"double"`
8244
8245	// Specify a value for the following Dolby Digital Plus setting: Left total/Right
8246	// total surround mix (Lt/Rt surround). MediaConvert uses this value for downmixing.
8247	// How the service uses this value depends on the value that you choose for
8248	// Stereo downmix (Eac3StereoDownmix). Valid values: -1.5, -3.0, -4.5, -6.0,
8249	// and -60. The value -60 mutes the channel. This setting applies only if you
8250	// keep the default value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2) for the
8251	// setting Coding mode (Eac3CodingMode). If you choose a different value for
8252	// Coding mode, the service ignores Left total/Right total surround (ltRtSurroundMixLevel).
8253	LtRtSurroundMixLevel *float64 `locationName:"ltRtSurroundMixLevel" type:"double"`
8254
8255	// When set to FOLLOW_INPUT, encoder metadata will be sourced from the DD, DD+,
8256	// or DolbyE decoder that supplied this audio data. If audio was not supplied
8257	// from one of these streams, then the static metadata settings will be used.
8258	MetadataControl *string `locationName:"metadataControl" type:"string" enum:"Eac3MetadataControl"`
8259
	// When set to WHEN_POSSIBLE, input DD+ audio will be passed through if it is
	// present on the input. This detection is dynamic over the life of the transcode.
	// Inputs that alternate between DD+ and non-DD+ content will have a consistent
	// DD+ output as the system alternates between passthrough and encoding.
8264	PassthroughControl *string `locationName:"passthroughControl" type:"string" enum:"Eac3PassthroughControl"`
8265
8266	// Controls the amount of phase-shift applied to the surround channels. Only
8267	// used for 3/2 coding mode.
8268	PhaseControl *string `locationName:"phaseControl" type:"string" enum:"Eac3PhaseControl"`
8269
8270	// This value is always 48000. It represents the sample rate in Hz.
8271	SampleRate *int64 `locationName:"sampleRate" min:"48000" type:"integer"`
8272
8273	// Choose how the service does stereo downmixing. This setting only applies
8274	// if you keep the default value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2)
8275	// for the setting Coding mode (Eac3CodingMode). If you choose a different value
8276	// for Coding mode, the service ignores Stereo downmix (Eac3StereoDownmix).
8277	StereoDownmix *string `locationName:"stereoDownmix" type:"string" enum:"Eac3StereoDownmix"`
8278
8279	// When encoding 3/2 audio, sets whether an extra center back surround channel
8280	// is matrix encoded into the left and right surround channels.
8281	SurroundExMode *string `locationName:"surroundExMode" type:"string" enum:"Eac3SurroundExMode"`
8282
8283	// When encoding 2/0 audio, sets whether Dolby Surround is matrix encoded into
8284	// the two channels.
8285	SurroundMode *string `locationName:"surroundMode" type:"string" enum:"Eac3SurroundMode"`
8286}
8287
8288// String returns the string representation
8289func (s Eac3Settings) String() string {
8290	return awsutil.Prettify(s)
8291}
8292
8293// GoString returns the string representation
8294func (s Eac3Settings) GoString() string {
8295	return s.String()
8296}
8297
8298// Validate inspects the fields of the type to determine if they are valid.
8299func (s *Eac3Settings) Validate() error {
8300	invalidParams := request.ErrInvalidParams{Context: "Eac3Settings"}
8301	if s.Bitrate != nil && *s.Bitrate < 64000 {
8302		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 64000))
8303	}
8304	if s.Dialnorm != nil && *s.Dialnorm < 1 {
8305		invalidParams.Add(request.NewErrParamMinValue("Dialnorm", 1))
8306	}
8307	if s.SampleRate != nil && *s.SampleRate < 48000 {
8308		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 48000))
8309	}
8310
8311	if invalidParams.Len() > 0 {
8312		return invalidParams
8313	}
8314	return nil
8315}
8316
8317// SetAttenuationControl sets the AttenuationControl field's value.
8318func (s *Eac3Settings) SetAttenuationControl(v string) *Eac3Settings {
8319	s.AttenuationControl = &v
8320	return s
8321}
8322
8323// SetBitrate sets the Bitrate field's value.
8324func (s *Eac3Settings) SetBitrate(v int64) *Eac3Settings {
8325	s.Bitrate = &v
8326	return s
8327}
8328
8329// SetBitstreamMode sets the BitstreamMode field's value.
8330func (s *Eac3Settings) SetBitstreamMode(v string) *Eac3Settings {
8331	s.BitstreamMode = &v
8332	return s
8333}
8334
8335// SetCodingMode sets the CodingMode field's value.
8336func (s *Eac3Settings) SetCodingMode(v string) *Eac3Settings {
8337	s.CodingMode = &v
8338	return s
8339}
8340
8341// SetDcFilter sets the DcFilter field's value.
8342func (s *Eac3Settings) SetDcFilter(v string) *Eac3Settings {
8343	s.DcFilter = &v
8344	return s
8345}
8346
8347// SetDialnorm sets the Dialnorm field's value.
8348func (s *Eac3Settings) SetDialnorm(v int64) *Eac3Settings {
8349	s.Dialnorm = &v
8350	return s
8351}
8352
8353// SetDynamicRangeCompressionLine sets the DynamicRangeCompressionLine field's value.
8354func (s *Eac3Settings) SetDynamicRangeCompressionLine(v string) *Eac3Settings {
8355	s.DynamicRangeCompressionLine = &v
8356	return s
8357}
8358
8359// SetDynamicRangeCompressionRf sets the DynamicRangeCompressionRf field's value.
8360func (s *Eac3Settings) SetDynamicRangeCompressionRf(v string) *Eac3Settings {
8361	s.DynamicRangeCompressionRf = &v
8362	return s
8363}
8364
8365// SetLfeControl sets the LfeControl field's value.
8366func (s *Eac3Settings) SetLfeControl(v string) *Eac3Settings {
8367	s.LfeControl = &v
8368	return s
8369}
8370
8371// SetLfeFilter sets the LfeFilter field's value.
8372func (s *Eac3Settings) SetLfeFilter(v string) *Eac3Settings {
8373	s.LfeFilter = &v
8374	return s
8375}
8376
8377// SetLoRoCenterMixLevel sets the LoRoCenterMixLevel field's value.
8378func (s *Eac3Settings) SetLoRoCenterMixLevel(v float64) *Eac3Settings {
8379	s.LoRoCenterMixLevel = &v
8380	return s
8381}
8382
8383// SetLoRoSurroundMixLevel sets the LoRoSurroundMixLevel field's value.
8384func (s *Eac3Settings) SetLoRoSurroundMixLevel(v float64) *Eac3Settings {
8385	s.LoRoSurroundMixLevel = &v
8386	return s
8387}
8388
8389// SetLtRtCenterMixLevel sets the LtRtCenterMixLevel field's value.
8390func (s *Eac3Settings) SetLtRtCenterMixLevel(v float64) *Eac3Settings {
8391	s.LtRtCenterMixLevel = &v
8392	return s
8393}
8394
8395// SetLtRtSurroundMixLevel sets the LtRtSurroundMixLevel field's value.
8396func (s *Eac3Settings) SetLtRtSurroundMixLevel(v float64) *Eac3Settings {
8397	s.LtRtSurroundMixLevel = &v
8398	return s
8399}
8400
8401// SetMetadataControl sets the MetadataControl field's value.
8402func (s *Eac3Settings) SetMetadataControl(v string) *Eac3Settings {
8403	s.MetadataControl = &v
8404	return s
8405}
8406
8407// SetPassthroughControl sets the PassthroughControl field's value.
8408func (s *Eac3Settings) SetPassthroughControl(v string) *Eac3Settings {
8409	s.PassthroughControl = &v
8410	return s
8411}
8412
8413// SetPhaseControl sets the PhaseControl field's value.
8414func (s *Eac3Settings) SetPhaseControl(v string) *Eac3Settings {
8415	s.PhaseControl = &v
8416	return s
8417}
8418
8419// SetSampleRate sets the SampleRate field's value.
8420func (s *Eac3Settings) SetSampleRate(v int64) *Eac3Settings {
8421	s.SampleRate = &v
8422	return s
8423}
8424
8425// SetStereoDownmix sets the StereoDownmix field's value.
8426func (s *Eac3Settings) SetStereoDownmix(v string) *Eac3Settings {
8427	s.StereoDownmix = &v
8428	return s
8429}
8430
8431// SetSurroundExMode sets the SurroundExMode field's value.
8432func (s *Eac3Settings) SetSurroundExMode(v string) *Eac3Settings {
8433	s.SurroundExMode = &v
8434	return s
8435}
8436
8437// SetSurroundMode sets the SurroundMode field's value.
8438func (s *Eac3Settings) SetSurroundMode(v string) *Eac3Settings {
8439	s.SurroundMode = &v
8440	return s
8441}
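
// The downmix fields above only take effect when you keep the default 3/2 coding
// mode. The following is an illustrative sketch, not generated documentation, of
// wiring those fields together with the fluent setters; the bitrate, stereo
// downmix string, and mix-level values are assumptions chosen from the ranges
// described in the field comments.
//
//    eac3 := &Eac3Settings{}
//    eac3.SetCodingMode("CODING_MODE_3_2") // keep 3/2 so the Lo/Ro levels apply
//    eac3.SetStereoDownmix("LO_RO")        // assumed Eac3StereoDownmix value; check the enum
//    eac3.SetLoRoCenterMixLevel(-3.0)      // one of the valid values listed above
//    eac3.SetLoRoSurroundMixLevel(-6.0)
//    eac3.SetBitrate(384000)               // must be at least 64000; valid rates depend on coding mode
//    eac3.SetSampleRate(48000)             // always 48000 Hz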
8442
8443// Settings specific to embedded/ancillary caption outputs, including 608/708
8444// Channel destination number.
8445type EmbeddedDestinationSettings struct {
8446	_ struct{} `type:"structure"`
8447
8448	// Ignore this setting unless your input captions are SCC format and your output
8449	// captions are embedded in the video stream. Specify a CC number for each captions
8450	// channel in this output. If you have two channels, choose CC numbers that
8451	// aren't in the same field. For example, choose 1 and 3. For more information,
8452	// see https://docs.aws.amazon.com/console/mediaconvert/dual-scc-to-embedded.
8453	Destination608ChannelNumber *int64 `locationName:"destination608ChannelNumber" min:"1" type:"integer"`
8454
8455	// Ignore this setting unless your input captions are SCC format and you want
8456	// both 608 and 708 captions embedded in your output stream. Optionally, specify
8457	// the 708 service number for each output captions channel. Choose a different
8458	// number for each channel. To use this setting, also set Force 608 to 708 upconvert
8459	// (Convert608To708) to Upconvert (UPCONVERT) in your input captions selector
8460	// settings. If you choose to upconvert but don't specify a 708 service number,
8461	// MediaConvert uses the number that you specify for CC channel number (destination608ChannelNumber)
8462	// for the 708 service number. For more information, see https://docs.aws.amazon.com/console/mediaconvert/dual-scc-to-embedded.
8463	Destination708ServiceNumber *int64 `locationName:"destination708ServiceNumber" min:"1" type:"integer"`
8464}
8465
8466// String returns the string representation
8467func (s EmbeddedDestinationSettings) String() string {
8468	return awsutil.Prettify(s)
8469}
8470
8471// GoString returns the string representation
8472func (s EmbeddedDestinationSettings) GoString() string {
8473	return s.String()
8474}
8475
8476// Validate inspects the fields of the type to determine if they are valid.
8477func (s *EmbeddedDestinationSettings) Validate() error {
8478	invalidParams := request.ErrInvalidParams{Context: "EmbeddedDestinationSettings"}
8479	if s.Destination608ChannelNumber != nil && *s.Destination608ChannelNumber < 1 {
8480		invalidParams.Add(request.NewErrParamMinValue("Destination608ChannelNumber", 1))
8481	}
8482	if s.Destination708ServiceNumber != nil && *s.Destination708ServiceNumber < 1 {
8483		invalidParams.Add(request.NewErrParamMinValue("Destination708ServiceNumber", 1))
8484	}
8485
8486	if invalidParams.Len() > 0 {
8487		return invalidParams
8488	}
8489	return nil
8490}
8491
8492// SetDestination608ChannelNumber sets the Destination608ChannelNumber field's value.
8493func (s *EmbeddedDestinationSettings) SetDestination608ChannelNumber(v int64) *EmbeddedDestinationSettings {
8494	s.Destination608ChannelNumber = &v
8495	return s
8496}
8497
8498// SetDestination708ServiceNumber sets the Destination708ServiceNumber field's value.
8499func (s *EmbeddedDestinationSettings) SetDestination708ServiceNumber(v int64) *EmbeddedDestinationSettings {
8500	s.Destination708ServiceNumber = &v
8501	return s
8502}
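
// As the field comments above describe, each output captions channel gets its own
// CC number (and, optionally, a 708 service number) when you embed SCC captions.
// A minimal illustrative sketch for one channel; the numbers are the example
// values from the field comments, not requirements.
//
//    embedded := &EmbeddedDestinationSettings{}
//    embedded.SetDestination608ChannelNumber(1) // CC1; use a different field (for example, 3) for a second channel
//    embedded.SetDestination708ServiceNumber(1) // optional; defaults to the 608 channel number when upconverting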
8503
// Settings for an embedded captions source.
8505type EmbeddedSourceSettings struct {
8506	_ struct{} `type:"structure"`
8507
8508	// Specify whether this set of input captions appears in your outputs in both
8509	// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
8510	// the captions data in two ways: it passes the 608 data through using the 608
8511	// compatibility bytes fields of the 708 wrapper, and it also translates the
8512	// 608 data into 708.
8513	Convert608To708 *string `locationName:"convert608To708" type:"string" enum:"EmbeddedConvert608To708"`
8514
8515	// Specifies the 608/708 channel number within the video track from which to
8516	// extract captions. Unused for passthrough.
8517	Source608ChannelNumber *int64 `locationName:"source608ChannelNumber" min:"1" type:"integer"`
8518
8519	// Specifies the video track index used for extracting captions. The system
8520	// only supports one input video track, so this should always be set to '1'.
8521	Source608TrackNumber *int64 `locationName:"source608TrackNumber" min:"1" type:"integer"`
8522
8523	// By default, the service terminates any unterminated captions at the end of
8524	// each input. If you want the caption to continue onto your next input, disable
8525	// this setting.
8526	TerminateCaptions *string `locationName:"terminateCaptions" type:"string" enum:"EmbeddedTerminateCaptions"`
8527}
8528
8529// String returns the string representation
8530func (s EmbeddedSourceSettings) String() string {
8531	return awsutil.Prettify(s)
8532}
8533
8534// GoString returns the string representation
8535func (s EmbeddedSourceSettings) GoString() string {
8536	return s.String()
8537}
8538
8539// Validate inspects the fields of the type to determine if they are valid.
8540func (s *EmbeddedSourceSettings) Validate() error {
8541	invalidParams := request.ErrInvalidParams{Context: "EmbeddedSourceSettings"}
8542	if s.Source608ChannelNumber != nil && *s.Source608ChannelNumber < 1 {
8543		invalidParams.Add(request.NewErrParamMinValue("Source608ChannelNumber", 1))
8544	}
8545	if s.Source608TrackNumber != nil && *s.Source608TrackNumber < 1 {
8546		invalidParams.Add(request.NewErrParamMinValue("Source608TrackNumber", 1))
8547	}
8548
8549	if invalidParams.Len() > 0 {
8550		return invalidParams
8551	}
8552	return nil
8553}
8554
8555// SetConvert608To708 sets the Convert608To708 field's value.
8556func (s *EmbeddedSourceSettings) SetConvert608To708(v string) *EmbeddedSourceSettings {
8557	s.Convert608To708 = &v
8558	return s
8559}
8560
8561// SetSource608ChannelNumber sets the Source608ChannelNumber field's value.
8562func (s *EmbeddedSourceSettings) SetSource608ChannelNumber(v int64) *EmbeddedSourceSettings {
8563	s.Source608ChannelNumber = &v
8564	return s
8565}
8566
8567// SetSource608TrackNumber sets the Source608TrackNumber field's value.
8568func (s *EmbeddedSourceSettings) SetSource608TrackNumber(v int64) *EmbeddedSourceSettings {
8569	s.Source608TrackNumber = &v
8570	return s
8571}
8572
8573// SetTerminateCaptions sets the TerminateCaptions field's value.
8574func (s *EmbeddedSourceSettings) SetTerminateCaptions(v string) *EmbeddedSourceSettings {
8575	s.TerminateCaptions = &v
8576	return s
8577}
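
// To extract embedded captions and include them in both 608 and 708 form, you
// combine the fields above roughly as follows. This is an illustrative sketch;
// the channel and track numbers are the defaults described in the field comments.
//
//    src := &EmbeddedSourceSettings{}
//    src.SetConvert608To708("UPCONVERT") // pass 608 through and also translate it to 708
//    src.SetSource608ChannelNumber(1)    // extract CC1
//    src.SetSource608TrackNumber(1)      // only one input video track is supported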
8578
8579// Describes an account-specific API endpoint.
8580type Endpoint struct {
8581	_ struct{} `type:"structure"`
8582
8583	// URL of endpoint
8584	Url *string `locationName:"url" type:"string"`
8585}
8586
8587// String returns the string representation
8588func (s Endpoint) String() string {
8589	return awsutil.Prettify(s)
8590}
8591
8592// GoString returns the string representation
8593func (s Endpoint) GoString() string {
8594	return s.String()
8595}
8596
8597// SetUrl sets the Url field's value.
8598func (s *Endpoint) SetUrl(v string) *Endpoint {
8599	s.Url = &v
8600	return s
8601}
8602
8603// ESAM ManifestConfirmConditionNotification defined by OC-SP-ESAM-API-I03-131025.
8604type EsamManifestConfirmConditionNotification struct {
8605	_ struct{} `type:"structure"`
8606
8607	// Provide your ESAM ManifestConfirmConditionNotification XML document inside
8608	// your JSON job settings. Form the XML document as per OC-SP-ESAM-API-I03-131025.
8609	// The transcoder will use the Manifest Conditioning instructions in the message
8610	// that you supply.
8611	MccXml *string `locationName:"mccXml" type:"string"`
8612}
8613
8614// String returns the string representation
8615func (s EsamManifestConfirmConditionNotification) String() string {
8616	return awsutil.Prettify(s)
8617}
8618
8619// GoString returns the string representation
8620func (s EsamManifestConfirmConditionNotification) GoString() string {
8621	return s.String()
8622}
8623
8624// SetMccXml sets the MccXml field's value.
8625func (s *EsamManifestConfirmConditionNotification) SetMccXml(v string) *EsamManifestConfirmConditionNotification {
8626	s.MccXml = &v
8627	return s
8628}
8629
8630// Settings for Event Signaling And Messaging (ESAM). If you don't do ad insertion,
8631// you can ignore these settings.
8632type EsamSettings struct {
8633	_ struct{} `type:"structure"`
8634
8635	// Specifies an ESAM ManifestConfirmConditionNotification XML as per OC-SP-ESAM-API-I03-131025.
8636	// The transcoder uses the manifest conditioning instructions that you provide
8637	// in the setting MCC XML (mccXml).
8638	ManifestConfirmConditionNotification *EsamManifestConfirmConditionNotification `locationName:"manifestConfirmConditionNotification" type:"structure"`
8639
	// Specifies the stream distance, in milliseconds, between the SCTE-35 messages
	// that the transcoder places and the splice points that they refer to. If the
	// time between the start of the asset and the SCTE-35 message is less than
	// this value, then the transcoder places the SCTE-35 marker at the beginning
	// of the stream.
8645	ResponseSignalPreroll *int64 `locationName:"responseSignalPreroll" type:"integer"`
8646
8647	// Specifies an ESAM SignalProcessingNotification XML as per OC-SP-ESAM-API-I03-131025.
8648	// The transcoder uses the signal processing instructions that you provide in
8649	// the setting SCC XML (sccXml).
8650	SignalProcessingNotification *EsamSignalProcessingNotification `locationName:"signalProcessingNotification" type:"structure"`
8651}
8652
8653// String returns the string representation
8654func (s EsamSettings) String() string {
8655	return awsutil.Prettify(s)
8656}
8657
8658// GoString returns the string representation
8659func (s EsamSettings) GoString() string {
8660	return s.String()
8661}
8662
8663// SetManifestConfirmConditionNotification sets the ManifestConfirmConditionNotification field's value.
8664func (s *EsamSettings) SetManifestConfirmConditionNotification(v *EsamManifestConfirmConditionNotification) *EsamSettings {
8665	s.ManifestConfirmConditionNotification = v
8666	return s
8667}
8668
8669// SetResponseSignalPreroll sets the ResponseSignalPreroll field's value.
8670func (s *EsamSettings) SetResponseSignalPreroll(v int64) *EsamSettings {
8671	s.ResponseSignalPreroll = &v
8672	return s
8673}
8674
8675// SetSignalProcessingNotification sets the SignalProcessingNotification field's value.
8676func (s *EsamSettings) SetSignalProcessingNotification(v *EsamSignalProcessingNotification) *EsamSettings {
8677	s.SignalProcessingNotification = v
8678	return s
8679}
8680
8681// ESAM SignalProcessingNotification data defined by OC-SP-ESAM-API-I03-131025.
8682type EsamSignalProcessingNotification struct {
8683	_ struct{} `type:"structure"`
8684
	// Provide your ESAM SignalProcessingNotification XML document inside your JSON
	// job settings. Form the XML document as per OC-SP-ESAM-API-I03-131025. The
	// transcoder will use the signal processing instructions in the message that
	// you supply. For your MPEG2-TS file outputs, if you want the service to place
	// SCTE-35 markers at the insertion points you specify in the XML document, you
	// must also enable SCTE-35 ESAM (scte35Esam). Note that you can either specify
	// an ESAM XML document or enable SCTE-35 passthrough. You can't do both.
8694	SccXml *string `locationName:"sccXml" type:"string"`
8695}
8696
8697// String returns the string representation
8698func (s EsamSignalProcessingNotification) String() string {
8699	return awsutil.Prettify(s)
8700}
8701
8702// GoString returns the string representation
8703func (s EsamSignalProcessingNotification) GoString() string {
8704	return s.String()
8705}
8706
8707// SetSccXml sets the SccXml field's value.
8708func (s *EsamSignalProcessingNotification) SetSccXml(v string) *EsamSignalProcessingNotification {
8709	s.SccXml = &v
8710	return s
8711}
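
// Putting the ESAM pieces together: the notification structs carry your XML
// documents, and EsamSettings groups them with the preroll. An illustrative
// sketch; mySignalProcessingXml is a hypothetical string variable holding your
// OC-SP-ESAM-API-I03-131025 document, and the preroll value is an assumption.
//
//    notification := &EsamSignalProcessingNotification{}
//    notification.SetSccXml(mySignalProcessingXml)
//
//    esam := &EsamSettings{}
//    esam.SetSignalProcessingNotification(notification)
//    esam.SetResponseSignalPreroll(4000) // milliseconds between the SCTE-35 message and its splice point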
8712
8713// Settings for F4v container
8714type F4vSettings struct {
8715	_ struct{} `type:"structure"`
8716
8717	// If set to PROGRESSIVE_DOWNLOAD, the MOOV atom is relocated to the beginning
8718	// of the archive as required for progressive downloading. Otherwise it is placed
8719	// normally at the end.
8720	MoovPlacement *string `locationName:"moovPlacement" type:"string" enum:"F4vMoovPlacement"`
8721}
8722
8723// String returns the string representation
8724func (s F4vSettings) String() string {
8725	return awsutil.Prettify(s)
8726}
8727
8728// GoString returns the string representation
8729func (s F4vSettings) GoString() string {
8730	return s.String()
8731}
8732
8733// SetMoovPlacement sets the MoovPlacement field's value.
8734func (s *F4vSettings) SetMoovPlacement(v string) *F4vSettings {
8735	s.MoovPlacement = &v
8736	return s
8737}
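
// For example, to make an F4V output suitable for progressive download, relocate
// the MOOV atom to the front of the file. An illustrative sketch; the enum string
// comes from the field comment above.
//
//    f4v := &F4vSettings{}
//    f4v.SetMoovPlacement("PROGRESSIVE_DOWNLOAD")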
8738
8739// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
8740// FILE_GROUP_SETTINGS.
8741type FileGroupSettings struct {
8742	_ struct{} `type:"structure"`
8743
8744	// Use Destination (Destination) to specify the S3 output location and the output
8745	// filename base. Destination accepts format identifiers. If you do not specify
8746	// the base filename in the URI, the service will use the filename of the input
8747	// file. If your job has multiple inputs, the service uses the filename of the
8748	// first input file.
8749	Destination *string `locationName:"destination" type:"string"`
8750
	// Settings associated with the destination. Will vary based on the type of
	// destination.
8753	DestinationSettings *DestinationSettings `locationName:"destinationSettings" type:"structure"`
8754}
8755
8756// String returns the string representation
8757func (s FileGroupSettings) String() string {
8758	return awsutil.Prettify(s)
8759}
8760
8761// GoString returns the string representation
8762func (s FileGroupSettings) GoString() string {
8763	return s.String()
8764}
8765
8766// SetDestination sets the Destination field's value.
8767func (s *FileGroupSettings) SetDestination(v string) *FileGroupSettings {
8768	s.Destination = &v
8769	return s
8770}
8771
8772// SetDestinationSettings sets the DestinationSettings field's value.
8773func (s *FileGroupSettings) SetDestinationSettings(v *DestinationSettings) *FileGroupSettings {
8774	s.DestinationSettings = v
8775	return s
8776}
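
// A short illustrative sketch of a file group destination; the bucket and prefix
// are hypothetical. Because the URI below omits a base filename, the service
// would use the filename of the (first) input file, as described above.
//
//    fileGroup := &FileGroupSettings{}
//    fileGroup.SetDestination("s3://DOC-EXAMPLE-BUCKET/outputs/")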
8777
// If your input captions are SCC, SMI, SRT, STL, TTML, or IMSC 1.1 in an XML
// file, specify the URI of the input caption source file. If your caption source
// is IMSC in an IMF package, use TrackSourceSettings instead of FileSourceSettings.
8781type FileSourceSettings struct {
8782	_ struct{} `type:"structure"`
8783
8784	// Specify whether this set of input captions appears in your outputs in both
8785	// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
8786	// the captions data in two ways: it passes the 608 data through using the 608
8787	// compatibility bytes fields of the 708 wrapper, and it also translates the
8788	// 608 data into 708.
8789	Convert608To708 *string `locationName:"convert608To708" type:"string" enum:"FileSourceConvert608To708"`
8790
8791	// Ignore this setting unless your input captions format is SCC. To have the
8792	// service compensate for differing frame rates between your input captions
8793	// and input video, specify the frame rate of the captions file. Specify this
8794	// value as a fraction, using the settings Framerate numerator (framerateNumerator)
8795	// and Framerate denominator (framerateDenominator). For example, you might
8796	// specify 24 / 1 for 24 fps, 25 / 1 for 25 fps, 24000 / 1001 for 23.976 fps,
8797	// or 30000 / 1001 for 29.97 fps.
8798	Framerate *CaptionSourceFramerate `locationName:"framerate" type:"structure"`
8799
8800	// External caption file used for loading captions. Accepted file extensions
8801	// are 'scc', 'ttml', 'dfxp', 'stl', 'srt', 'xml', and 'smi'.
8802	SourceFile *string `locationName:"sourceFile" min:"14" type:"string"`
8803
8804	// Specifies a time delta in seconds to offset the captions from the source
8805	// file.
8806	TimeDelta *int64 `locationName:"timeDelta" type:"integer"`
8807}
8808
8809// String returns the string representation
8810func (s FileSourceSettings) String() string {
8811	return awsutil.Prettify(s)
8812}
8813
8814// GoString returns the string representation
8815func (s FileSourceSettings) GoString() string {
8816	return s.String()
8817}
8818
8819// Validate inspects the fields of the type to determine if they are valid.
8820func (s *FileSourceSettings) Validate() error {
8821	invalidParams := request.ErrInvalidParams{Context: "FileSourceSettings"}
8822	if s.SourceFile != nil && len(*s.SourceFile) < 14 {
8823		invalidParams.Add(request.NewErrParamMinLen("SourceFile", 14))
8824	}
8825	if s.TimeDelta != nil && *s.TimeDelta < -2.147483648e+09 {
8826		invalidParams.Add(request.NewErrParamMinValue("TimeDelta", -2.147483648e+09))
8827	}
8828	if s.Framerate != nil {
8829		if err := s.Framerate.Validate(); err != nil {
8830			invalidParams.AddNested("Framerate", err.(request.ErrInvalidParams))
8831		}
8832	}
8833
8834	if invalidParams.Len() > 0 {
8835		return invalidParams
8836	}
8837	return nil
8838}
8839
8840// SetConvert608To708 sets the Convert608To708 field's value.
8841func (s *FileSourceSettings) SetConvert608To708(v string) *FileSourceSettings {
8842	s.Convert608To708 = &v
8843	return s
8844}
8845
8846// SetFramerate sets the Framerate field's value.
8847func (s *FileSourceSettings) SetFramerate(v *CaptionSourceFramerate) *FileSourceSettings {
8848	s.Framerate = v
8849	return s
8850}
8851
8852// SetSourceFile sets the SourceFile field's value.
8853func (s *FileSourceSettings) SetSourceFile(v string) *FileSourceSettings {
8854	s.SourceFile = &v
8855	return s
8856}
8857
8858// SetTimeDelta sets the TimeDelta field's value.
8859func (s *FileSourceSettings) SetTimeDelta(v int64) *FileSourceSettings {
8860	s.TimeDelta = &v
8861	return s
8862}
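
// An illustrative sketch of an SCC caption source that uses the frame rate
// compensation described above. The S3 location is hypothetical, and the sketch
// assumes CaptionSourceFramerate exposes Set* methods following the same
// convention as the rest of this package.
//
//    captions := &FileSourceSettings{}
//    captions.SetSourceFile("s3://DOC-EXAMPLE-BUCKET/captions/episode01.scc")
//    captions.SetConvert608To708("UPCONVERT")
//    captions.SetTimeDelta(-2) // shift captions two seconds earlier
//
//    framerate := &CaptionSourceFramerate{}
//    framerate.SetFramerateNumerator(24000)  // 24000 / 1001 = 23.976 fps
//    framerate.SetFramerateDenominator(1001)
//    captions.SetFramerate(framerate)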
8863
8864type ForbiddenException struct {
8865	_            struct{}                  `type:"structure"`
8866	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
8867
8868	Message_ *string `locationName:"message" type:"string"`
8869}
8870
8871// String returns the string representation
8872func (s ForbiddenException) String() string {
8873	return awsutil.Prettify(s)
8874}
8875
8876// GoString returns the string representation
8877func (s ForbiddenException) GoString() string {
8878	return s.String()
8879}
8880
8881func newErrorForbiddenException(v protocol.ResponseMetadata) error {
8882	return &ForbiddenException{
8883		RespMetadata: v,
8884	}
8885}
8886
8887// Code returns the exception type name.
8888func (s *ForbiddenException) Code() string {
8889	return "ForbiddenException"
8890}
8891
8892// Message returns the exception's message.
8893func (s *ForbiddenException) Message() string {
8894	if s.Message_ != nil {
8895		return *s.Message_
8896	}
8897	return ""
8898}
8899
8900// OrigErr always returns nil, satisfies awserr.Error interface.
8901func (s *ForbiddenException) OrigErr() error {
8902	return nil
8903}
8904
8905func (s *ForbiddenException) Error() string {
8906	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
8907}
8908
// StatusCode returns the HTTP status code for the request's response error.
8910func (s *ForbiddenException) StatusCode() int {
8911	return s.RespMetadata.StatusCode
8912}
8913
// RequestID returns the service's response RequestID for the request.
8915func (s *ForbiddenException) RequestID() string {
8916	return s.RespMetadata.RequestID
8917}
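
// Because the modeled exceptions in this package implement awserr.Error through
// their Code and Message methods, you can branch on the concrete type after a
// failed call. An illustrative sketch; the client, operation, and job ID are
// placeholders for any call in this package.
//
//    _, err := client.GetJob(&GetJobInput{Id: aws.String("1234abcd")})
//    if err != nil {
//        if forbidden, ok := err.(*ForbiddenException); ok {
//            fmt.Println("forbidden:", forbidden.Message())
//        }
//    }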
8918
8919// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
8920// the value FRAME_CAPTURE.
8921type FrameCaptureSettings struct {
8922	_ struct{} `type:"structure"`
8923
8924	// Frame capture will encode the first frame of the output stream, then one
8925	// frame every framerateDenominator/framerateNumerator seconds. For example,
8926	// settings of framerateNumerator = 1 and framerateDenominator = 3 (a rate of
8927	// 1/3 frame per second) will capture the first frame, then 1 frame every 3s.
	// Files will be named as filename.n.jpg where n is the 0-based sequence number
	// of each capture.
8930	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
8931
8932	// Frame capture will encode the first frame of the output stream, then one
8933	// frame every framerateDenominator/framerateNumerator seconds. For example,
8934	// settings of framerateNumerator = 1 and framerateDenominator = 3 (a rate of
8935	// 1/3 frame per second) will capture the first frame, then 1 frame every 3s.
8936	// Files will be named as filename.NNNNNNN.jpg where N is the 0-based frame
8937	// sequence number zero padded to 7 decimal places.
8938	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
8939
8940	// Maximum number of captures (encoded jpg output files).
8941	MaxCaptures *int64 `locationName:"maxCaptures" min:"1" type:"integer"`
8942
8943	// JPEG Quality - a higher value equals higher quality.
8944	Quality *int64 `locationName:"quality" min:"1" type:"integer"`
8945}
8946
8947// String returns the string representation
8948func (s FrameCaptureSettings) String() string {
8949	return awsutil.Prettify(s)
8950}
8951
8952// GoString returns the string representation
8953func (s FrameCaptureSettings) GoString() string {
8954	return s.String()
8955}
8956
8957// Validate inspects the fields of the type to determine if they are valid.
8958func (s *FrameCaptureSettings) Validate() error {
8959	invalidParams := request.ErrInvalidParams{Context: "FrameCaptureSettings"}
8960	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
8961		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
8962	}
8963	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
8964		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
8965	}
8966	if s.MaxCaptures != nil && *s.MaxCaptures < 1 {
8967		invalidParams.Add(request.NewErrParamMinValue("MaxCaptures", 1))
8968	}
8969	if s.Quality != nil && *s.Quality < 1 {
8970		invalidParams.Add(request.NewErrParamMinValue("Quality", 1))
8971	}
8972
8973	if invalidParams.Len() > 0 {
8974		return invalidParams
8975	}
8976	return nil
8977}
8978
8979// SetFramerateDenominator sets the FramerateDenominator field's value.
8980func (s *FrameCaptureSettings) SetFramerateDenominator(v int64) *FrameCaptureSettings {
8981	s.FramerateDenominator = &v
8982	return s
8983}
8984
8985// SetFramerateNumerator sets the FramerateNumerator field's value.
8986func (s *FrameCaptureSettings) SetFramerateNumerator(v int64) *FrameCaptureSettings {
8987	s.FramerateNumerator = &v
8988	return s
8989}
8990
8991// SetMaxCaptures sets the MaxCaptures field's value.
8992func (s *FrameCaptureSettings) SetMaxCaptures(v int64) *FrameCaptureSettings {
8993	s.MaxCaptures = &v
8994	return s
8995}
8996
8997// SetQuality sets the Quality field's value.
8998func (s *FrameCaptureSettings) SetQuality(v int64) *FrameCaptureSettings {
8999	s.Quality = &v
9000	return s
9001}
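
// The field comments above describe the capture rate as framerateNumerator over
// framerateDenominator. An illustrative sketch that captures one frame every
// three seconds, matching the example in those comments; the max-captures and
// quality values are assumptions.
//
//    capture := &FrameCaptureSettings{}
//    capture.SetFramerateNumerator(1)   // one frame...
//    capture.SetFramerateDenominator(3) // ...every 3 seconds
//    capture.SetMaxCaptures(10)         // stop after 10 JPEG files
//    capture.SetQuality(80)             // higher value = higher JPEG quality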
9002
9003// Query a job by sending a request with the job ID.
9004type GetJobInput struct {
9005	_ struct{} `type:"structure"`
9006
	// The ID of the job.
9008	//
9009	// Id is a required field
9010	Id *string `location:"uri" locationName:"id" type:"string" required:"true"`
9011}
9012
9013// String returns the string representation
9014func (s GetJobInput) String() string {
9015	return awsutil.Prettify(s)
9016}
9017
9018// GoString returns the string representation
9019func (s GetJobInput) GoString() string {
9020	return s.String()
9021}
9022
9023// Validate inspects the fields of the type to determine if they are valid.
9024func (s *GetJobInput) Validate() error {
9025	invalidParams := request.ErrInvalidParams{Context: "GetJobInput"}
9026	if s.Id == nil {
9027		invalidParams.Add(request.NewErrParamRequired("Id"))
9028	}
9029	if s.Id != nil && len(*s.Id) < 1 {
9030		invalidParams.Add(request.NewErrParamMinLen("Id", 1))
9031	}
9032
9033	if invalidParams.Len() > 0 {
9034		return invalidParams
9035	}
9036	return nil
9037}
9038
9039// SetId sets the Id field's value.
9040func (s *GetJobInput) SetId(v string) *GetJobInput {
9041	s.Id = &v
9042	return s
9043}
9044
9045// Successful get job requests will return an OK message and the job JSON.
9046type GetJobOutput struct {
9047	_ struct{} `type:"structure"`
9048
9049	// Each job converts an input file into an output file or files. For more information,
9050	// see the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
9051	Job *Job `locationName:"job" type:"structure"`
9052}
9053
9054// String returns the string representation
9055func (s GetJobOutput) String() string {
9056	return awsutil.Prettify(s)
9057}
9058
9059// GoString returns the string representation
9060func (s GetJobOutput) GoString() string {
9061	return s.String()
9062}
9063
9064// SetJob sets the Job field's value.
9065func (s *GetJobOutput) SetJob(v *Job) *GetJobOutput {
9066	s.Job = v
9067	return s
9068}
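
// An illustrative sketch of querying a job by ID, following the request pattern
// used throughout this package; the client construction and job ID are
// hypothetical placeholders.
//
//    out, err := client.GetJob(&GetJobInput{Id: aws.String("1234abcd")})
//    if err == nil && out.Job != nil {
//        fmt.Println(out.Job) // the job, prettified via String()
//    }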
9069
9070// Query a job template by sending a request with the job template name.
9071type GetJobTemplateInput struct {
9072	_ struct{} `type:"structure"`
9073
9074	// The name of the job template.
9075	//
9076	// Name is a required field
9077	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
9078}
9079
9080// String returns the string representation
9081func (s GetJobTemplateInput) String() string {
9082	return awsutil.Prettify(s)
9083}
9084
9085// GoString returns the string representation
9086func (s GetJobTemplateInput) GoString() string {
9087	return s.String()
9088}
9089
9090// Validate inspects the fields of the type to determine if they are valid.
9091func (s *GetJobTemplateInput) Validate() error {
9092	invalidParams := request.ErrInvalidParams{Context: "GetJobTemplateInput"}
9093	if s.Name == nil {
9094		invalidParams.Add(request.NewErrParamRequired("Name"))
9095	}
9096	if s.Name != nil && len(*s.Name) < 1 {
9097		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
9098	}
9099
9100	if invalidParams.Len() > 0 {
9101		return invalidParams
9102	}
9103	return nil
9104}
9105
9106// SetName sets the Name field's value.
9107func (s *GetJobTemplateInput) SetName(v string) *GetJobTemplateInput {
9108	s.Name = &v
9109	return s
9110}
9111
9112// Successful get job template requests will return an OK message and the job
9113// template JSON.
9114type GetJobTemplateOutput struct {
9115	_ struct{} `type:"structure"`
9116
9117	// A job template is a pre-made set of encoding instructions that you can use
9118	// to quickly create a job.
9119	JobTemplate *JobTemplate `locationName:"jobTemplate" type:"structure"`
9120}
9121
9122// String returns the string representation
9123func (s GetJobTemplateOutput) String() string {
9124	return awsutil.Prettify(s)
9125}
9126
9127// GoString returns the string representation
9128func (s GetJobTemplateOutput) GoString() string {
9129	return s.String()
9130}
9131
9132// SetJobTemplate sets the JobTemplate field's value.
9133func (s *GetJobTemplateOutput) SetJobTemplate(v *JobTemplate) *GetJobTemplateOutput {
9134	s.JobTemplate = v
9135	return s
9136}
9137
9138// Query a preset by sending a request with the preset name.
9139type GetPresetInput struct {
9140	_ struct{} `type:"structure"`
9141
9142	// The name of the preset.
9143	//
9144	// Name is a required field
9145	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
9146}
9147
9148// String returns the string representation
9149func (s GetPresetInput) String() string {
9150	return awsutil.Prettify(s)
9151}
9152
9153// GoString returns the string representation
9154func (s GetPresetInput) GoString() string {
9155	return s.String()
9156}
9157
9158// Validate inspects the fields of the type to determine if they are valid.
9159func (s *GetPresetInput) Validate() error {
9160	invalidParams := request.ErrInvalidParams{Context: "GetPresetInput"}
9161	if s.Name == nil {
9162		invalidParams.Add(request.NewErrParamRequired("Name"))
9163	}
9164	if s.Name != nil && len(*s.Name) < 1 {
9165		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
9166	}
9167
9168	if invalidParams.Len() > 0 {
9169		return invalidParams
9170	}
9171	return nil
9172}
9173
9174// SetName sets the Name field's value.
9175func (s *GetPresetInput) SetName(v string) *GetPresetInput {
9176	s.Name = &v
9177	return s
9178}
9179
9180// Successful get preset requests will return an OK message and the preset JSON.
9181type GetPresetOutput struct {
9182	_ struct{} `type:"structure"`
9183
9184	// A preset is a collection of preconfigured media conversion settings that
9185	// you want MediaConvert to apply to the output during the conversion process.
9186	Preset *Preset `locationName:"preset" type:"structure"`
9187}
9188
9189// String returns the string representation
9190func (s GetPresetOutput) String() string {
9191	return awsutil.Prettify(s)
9192}
9193
9194// GoString returns the string representation
9195func (s GetPresetOutput) GoString() string {
9196	return s.String()
9197}
9198
9199// SetPreset sets the Preset field's value.
9200func (s *GetPresetOutput) SetPreset(v *Preset) *GetPresetOutput {
9201	s.Preset = v
9202	return s
9203}
9204
9205// Get information about a queue by sending a request with the queue name.
9206type GetQueueInput struct {
9207	_ struct{} `type:"structure"`
9208
9209	// The name of the queue that you want information about.
9210	//
9211	// Name is a required field
9212	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
9213}
9214
9215// String returns the string representation
9216func (s GetQueueInput) String() string {
9217	return awsutil.Prettify(s)
9218}
9219
9220// GoString returns the string representation
9221func (s GetQueueInput) GoString() string {
9222	return s.String()
9223}
9224
9225// Validate inspects the fields of the type to determine if they are valid.
9226func (s *GetQueueInput) Validate() error {
9227	invalidParams := request.ErrInvalidParams{Context: "GetQueueInput"}
9228	if s.Name == nil {
9229		invalidParams.Add(request.NewErrParamRequired("Name"))
9230	}
9231	if s.Name != nil && len(*s.Name) < 1 {
9232		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
9233	}
9234
9235	if invalidParams.Len() > 0 {
9236		return invalidParams
9237	}
9238	return nil
9239}
9240
9241// SetName sets the Name field's value.
9242func (s *GetQueueInput) SetName(v string) *GetQueueInput {
9243	s.Name = &v
9244	return s
9245}
9246
9247// Successful get queue requests return an OK message and information about
9248// the queue in JSON.
9249type GetQueueOutput struct {
9250	_ struct{} `type:"structure"`
9251
9252	// You can use queues to manage the resources that are available to your AWS
9253	// account for running multiple transcoding jobs at the same time. If you don't
9254	// specify a queue, the service sends all jobs through the default queue. For
9255	// more information, see https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html.
9256	Queue *Queue `locationName:"queue" type:"structure"`
9257}
9258
9259// String returns the string representation
9260func (s GetQueueOutput) String() string {
9261	return awsutil.Prettify(s)
9262}
9263
9264// GoString returns the string representation
9265func (s GetQueueOutput) GoString() string {
9266	return s.String()
9267}
9268
9269// SetQueue sets the Queue field's value.
9270func (s *GetQueueOutput) SetQueue(v *Queue) *GetQueueOutput {
9271	s.Queue = v
9272	return s
9273}
9274
9275// Settings for quality-defined variable bitrate encoding with the H.264 codec.
9276// Required when you set Rate control mode to QVBR. Not valid when you set Rate
9277// control mode to a value other than QVBR, or when you don't define Rate control
9278// mode.
9279type H264QvbrSettings struct {
9280	_ struct{} `type:"structure"`
9281
9282	// Use this setting only when Rate control mode is QVBR and Quality tuning level
9283	// is Multi-pass HQ. For Max average bitrate values suited to the complexity
9284	// of your input video, the service limits the average bitrate of the video
9285	// part of this output to the value that you choose. That is, the total size
9286	// of the video element is less than or equal to the value you set multiplied
9287	// by the number of seconds of encoded output.
9288	MaxAverageBitrate *int64 `locationName:"maxAverageBitrate" min:"1000" type:"integer"`
9289
9290	// Required when you use QVBR rate control mode. That is, when you specify qvbrSettings
9291	// within h264Settings. Specify the general target quality level for this output,
9292	// from 1 to 10. Use higher numbers for greater quality. Level 10 results in
9293	// nearly lossless compression. The quality level for most broadcast-quality
9294	// transcodes is between 6 and 9. Optionally, to specify a value between whole
9295	// numbers, also provide a value for the setting qvbrQualityLevelFineTune. For
9296	// example, if you want your QVBR quality level to be 7.33, set qvbrQualityLevel
9297	// to 7 and set qvbrQualityLevelFineTune to .33.
9298	QvbrQualityLevel *int64 `locationName:"qvbrQualityLevel" min:"1" type:"integer"`
9299
9300	// Optional. Specify a value here to set the QVBR quality to a level that is
9301	// between whole numbers. For example, if you want your QVBR quality level to
9302	// be 7.33, set qvbrQualityLevel to 7 and set qvbrQualityLevelFineTune to .33.
9303	// MediaConvert rounds your QVBR quality level to the nearest third of a whole
9304	// number. For example, if you set qvbrQualityLevel to 7 and you set qvbrQualityLevelFineTune
9305	// to .25, your actual QVBR quality level is 7.33.
9306	QvbrQualityLevelFineTune *float64 `locationName:"qvbrQualityLevelFineTune" type:"double"`
9307}
9308
9309// String returns the string representation
9310func (s H264QvbrSettings) String() string {
9311	return awsutil.Prettify(s)
9312}
9313
9314// GoString returns the string representation
9315func (s H264QvbrSettings) GoString() string {
9316	return s.String()
9317}
9318
9319// Validate inspects the fields of the type to determine if they are valid.
9320func (s *H264QvbrSettings) Validate() error {
9321	invalidParams := request.ErrInvalidParams{Context: "H264QvbrSettings"}
9322	if s.MaxAverageBitrate != nil && *s.MaxAverageBitrate < 1000 {
9323		invalidParams.Add(request.NewErrParamMinValue("MaxAverageBitrate", 1000))
9324	}
9325	if s.QvbrQualityLevel != nil && *s.QvbrQualityLevel < 1 {
9326		invalidParams.Add(request.NewErrParamMinValue("QvbrQualityLevel", 1))
9327	}
9328
9329	if invalidParams.Len() > 0 {
9330		return invalidParams
9331	}
9332	return nil
9333}
9334
9335// SetMaxAverageBitrate sets the MaxAverageBitrate field's value.
9336func (s *H264QvbrSettings) SetMaxAverageBitrate(v int64) *H264QvbrSettings {
9337	s.MaxAverageBitrate = &v
9338	return s
9339}
9340
9341// SetQvbrQualityLevel sets the QvbrQualityLevel field's value.
9342func (s *H264QvbrSettings) SetQvbrQualityLevel(v int64) *H264QvbrSettings {
9343	s.QvbrQualityLevel = &v
9344	return s
9345}
9346
9347// SetQvbrQualityLevelFineTune sets the QvbrQualityLevelFineTune field's value.
9348func (s *H264QvbrSettings) SetQvbrQualityLevelFineTune(v float64) *H264QvbrSettings {
9349	s.QvbrQualityLevelFineTune = &v
9350	return s
9351}
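
// Mirroring the 7.33 example in the field comments above, you split a fractional
// QVBR quality level across the whole-number and fine-tune fields. An
// illustrative sketch; the max average bitrate is an assumption and only applies
// with multi-pass HQ quality tuning.
//
//    qvbr := &H264QvbrSettings{}
//    qvbr.SetQvbrQualityLevel(7)            // whole-number part
//    qvbr.SetQvbrQualityLevelFineTune(0.33) // rounded to the nearest third: 7.33
//    qvbr.SetMaxAverageBitrate(5000000)     // 5 Mb/s cap on the average video bitrate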
9352
9353// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
9354// the value H_264.
9355type H264Settings struct {
9356	_ struct{} `type:"structure"`
9357
9358	// Keep the default value, Auto (AUTO), for this setting to have MediaConvert
9359	// automatically apply the best types of quantization for your video content.
9360	// When you want to apply your quantization settings manually, you must set
9361	// H264AdaptiveQuantization to a value other than Auto (AUTO). Use this setting
9362	// to specify the strength of any adaptive quantization filters that you enable.
9363	// If you don't want MediaConvert to do any adaptive quantization in this transcode,
9364	// set Adaptive quantization (H264AdaptiveQuantization) to Off (OFF). Related
9365	// settings: The value that you choose here applies to the following settings:
9366	// H264FlickerAdaptiveQuantization, H264SpatialAdaptiveQuantization, and H264TemporalAdaptiveQuantization.
9367	AdaptiveQuantization *string `locationName:"adaptiveQuantization" type:"string" enum:"H264AdaptiveQuantization"`
9368
9369	// Specify the average bitrate in bits per second. Required for VBR and CBR.
9370	// For MS Smooth outputs, bitrates must be unique when rounded down to the nearest
9371	// multiple of 1000.
9372	Bitrate *int64 `locationName:"bitrate" min:"1000" type:"integer"`
9373
9374	// Specify an H.264 level that is consistent with your output video settings.
9375	// If you aren't sure what level to specify, choose Auto (AUTO).
9376	CodecLevel *string `locationName:"codecLevel" type:"string" enum:"H264CodecLevel"`
9377
9378	// H.264 Profile. High 4:2:2 and 10-bit profiles are only available with the
9379	// AVC-I License.
9380	CodecProfile *string `locationName:"codecProfile" type:"string" enum:"H264CodecProfile"`
9381
9382	// Choose Adaptive to improve subjective video quality for high-motion content.
9383	// This will cause the service to use fewer B-frames (which infer information
9384	// based on other frames) for high-motion portions of the video and more B-frames
9385	// for low-motion portions. The maximum number of B-frames is limited by the
9386	// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
9387	DynamicSubGop *string `locationName:"dynamicSubGop" type:"string" enum:"H264DynamicSubGop"`
9388
9389	// Entropy encoding mode. Use CABAC (must be in Main or High profile) or CAVLC.
9390	EntropyEncoding *string `locationName:"entropyEncoding" type:"string" enum:"H264EntropyEncoding"`
9391
9392	// Keep the default value, PAFF, to have MediaConvert use PAFF encoding for
9393	// interlaced outputs. Choose Force field (FORCE_FIELD) to disable PAFF encoding
9394	// and create separate interlaced fields.
9395	FieldEncoding *string `locationName:"fieldEncoding" type:"string" enum:"H264FieldEncoding"`
9396
9397	// Only use this setting when you change the default value, AUTO, for the setting
9398	// H264AdaptiveQuantization. When you keep all defaults, excluding H264AdaptiveQuantization
9399	// and all other adaptive quantization from your JSON job specification, MediaConvert
9400	// automatically applies the best types of quantization for your video content.
9401	// When you set H264AdaptiveQuantization to a value other than AUTO, the default
9402	// value for H264FlickerAdaptiveQuantization is Disabled (DISABLED). Change
9403	// this value to Enabled (ENABLED) to reduce I-frame pop. I-frame pop appears
9404	// as a visual flicker that can arise when the encoder saves bits by copying
9405	// some macroblocks many times from frame to frame, and then refreshes them
9406	// at the I-frame. When you enable this setting, the encoder updates these macroblocks
9407	// slightly more often to smooth out the flicker. To manually enable or disable
9408	// H264FlickerAdaptiveQuantization, you must set Adaptive quantization (H264AdaptiveQuantization)
9409	// to a value other than AUTO.
9410	FlickerAdaptiveQuantization *string `locationName:"flickerAdaptiveQuantization" type:"string" enum:"H264FlickerAdaptiveQuantization"`
9411
9412	// If you are using the console, use the Framerate setting to specify the frame
9413	// rate for this output. If you want to keep the same frame rate as the input
9414	// video, choose Follow source. If you want to do frame rate conversion, choose
9415	// a frame rate from the dropdown list or choose Custom. The framerates shown
9416	// in the dropdown list are decimal approximations of fractions. If you choose
9417	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
9418	// job specification as a JSON file without the console, use FramerateControl
9419	// to specify which value the service uses for the frame rate for this output.
9420	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
9421	// from the input. Choose SPECIFIED if you want the service to use the frame
9422	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
9423	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"H264FramerateControl"`
9424
9425	// Choose the method that you want MediaConvert to use when increasing or decreasing
9426	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
9427	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
9428	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
9429	// smooth picture, but might introduce undesirable video artifacts. For complex
9430	// frame rate conversions, especially if your source video has already been
9431	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
9432	// motion-compensated interpolation. FrameFormer chooses the best conversion
9433	// method frame by frame. Note that using FrameFormer increases the transcoding
9434	// time and incurs a significant add-on cost.
9435	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"H264FramerateConversionAlgorithm"`
9436
9437	// When you use the API for transcode jobs that use frame rate conversion, specify
9438	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
9439	// FramerateDenominator to specify the denominator of this fraction. In this
9440	// example, use 1001 for the value of FramerateDenominator. When you use the
9441	// console for transcode jobs that use frame rate conversion, provide the value
9442	// as a decimal number for Framerate. In this example, specify 23.976.
9443	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
9444
9445	// When you use the API for transcode jobs that use frame rate conversion, specify
9446	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
9447	// FramerateNumerator to specify the numerator of this fraction. In this example,
9448	// use 24000 for the value of FramerateNumerator. When you use the console for
9449	// transcode jobs that use frame rate conversion, provide the value as a decimal
9450	// number for Framerate. In this example, specify 23.976.
9451	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
9452
	// If enabled, use reference B frames for GOP structures that have B frames >
	// 1.
9455	GopBReference *string `locationName:"gopBReference" type:"string" enum:"H264GopBReference"`
9456
9457	// Frequency of closed GOPs. In streaming applications, it is recommended that
9458	// this be set to 1 so a decoder joining mid-stream will receive an IDR frame
9459	// as quickly as possible. Setting this value to 0 will break output segmenting.
9460	GopClosedCadence *int64 `locationName:"gopClosedCadence" type:"integer"`
9461
9462	// GOP Length (keyframe interval) in frames or seconds. Must be greater than
9463	// zero.
9464	GopSize *float64 `locationName:"gopSize" type:"double"`
9465
	// Indicates if the GOP Size in H264 is specified in frames or seconds. If seconds,
	// the system will convert the GOP Size into a frame count at run time.
9468	GopSizeUnits *string `locationName:"gopSizeUnits" type:"string" enum:"H264GopSizeUnits"`
9469
9470	// Percentage of the buffer that should initially be filled (HRD buffer model).
9471	HrdBufferInitialFillPercentage *int64 `locationName:"hrdBufferInitialFillPercentage" type:"integer"`
9472
9473	// Size of buffer (HRD buffer model) in bits. For example, enter five megabits
9474	// as 5000000.
9475	HrdBufferSize *int64 `locationName:"hrdBufferSize" type:"integer"`
9476
9477	// Choose the scan line type for the output. Keep the default value, Progressive
9478	// (PROGRESSIVE) to create a progressive output, regardless of the scan type
9479	// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
9480	// to create an output that's interlaced with the same field polarity throughout.
9481	// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
9482	// to produce outputs with the same field polarity as the source. For jobs that
9483	// have multiple inputs, the output field polarity might change over the course
9484	// of the output. Follow behavior depends on the input scan type. If the source
9485	// is interlaced, the output will be interlaced with the same polarity as the
	// source. If the source is progressive, the output will be interlaced with
	// top field or bottom field first, depending on which of the Follow options
	// you choose.
9489	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"H264InterlaceMode"`
9490
9491	// Maximum bitrate in bits/second. For example, enter five megabits per second
9492	// as 5000000. Required when Rate control mode is QVBR.
9493	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
9494
9495	// Enforces separation between repeated (cadence) I-frames and I-frames inserted
9496	// by Scene Change Detection. If a scene change I-frame is within I-interval
9497	// frames of a cadence I-frame, the GOP is shrunk and/or stretched to the scene
9498	// change I-frame. GOP stretch requires enabling lookahead as well as setting
9499	// I-interval. The normal cadence resumes for the next GOP. This setting is
9500	// only used when Scene Change Detect is enabled. Note: Maximum GOP stretch
9501	// = GOP size + Min-I-interval - 1
9502	MinIInterval *int64 `locationName:"minIInterval" type:"integer"`
9503
9504	// Number of B-frames between reference frames.
9505	NumberBFramesBetweenReferenceFrames *int64 `locationName:"numberBFramesBetweenReferenceFrames" type:"integer"`
9506
9507	// Number of reference frames to use. The encoder may use more than requested
9508	// if using B-frames and/or interlaced encoding.
9509	NumberReferenceFrames *int64 `locationName:"numberReferenceFrames" min:"1" type:"integer"`
9510
9511	// Optional. Specify how the service determines the pixel aspect ratio (PAR)
9512	// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
9513	// uses the PAR from your input video for your output. To specify a different
9514	// PAR in the console, choose any value other than Follow source. To specify
9515	// a different PAR by editing the JSON job specification, choose SPECIFIED.
9516	// When you choose SPECIFIED for this setting, you must also specify values
9517	// for the parNumerator and parDenominator settings.
9518	ParControl *string `locationName:"parControl" type:"string" enum:"H264ParControl"`
9519
9520	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
9521	// console, this corresponds to any value other than Follow source. When you
9522	// specify an output pixel aspect ratio (PAR) that is different from your input
9523	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
9524	// widescreen, you would specify the ratio 40:33. In this example, the value
9525	// for parDenominator is 33.
9526	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
9527
9528	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
9529	// console, this corresponds to any value other than Follow source. When you
9530	// specify an output pixel aspect ratio (PAR) that is different from your input
9531	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
9532	// widescreen, you would specify the ratio 40:33. In this example, the value
9533	// for parNumerator is 40.
9534	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
9535
9536	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
9537	// want to trade off encoding speed for output video quality. The default behavior
9538	// is faster, lower quality, single-pass encoding.
9539	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"H264QualityTuningLevel"`
9540
9541	// Settings for quality-defined variable bitrate encoding with the H.264 codec.
9542	// Required when you set Rate control mode to QVBR. Not valid when you set Rate
9543	// control mode to a value other than QVBR, or when you don't define Rate control
9544	// mode.
9545	QvbrSettings *H264QvbrSettings `locationName:"qvbrSettings" type:"structure"`
9546
9547	// Use this setting to specify whether this output has a variable bitrate (VBR),
9548	// constant bitrate (CBR) or quality-defined variable bitrate (QVBR).
9549	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"H264RateControlMode"`
9550
9551	// Places a PPS header on each encoded picture, even if repeated.
9552	RepeatPps *string `locationName:"repeatPps" type:"string" enum:"H264RepeatPps"`
9553
9554	// Use this setting for interlaced outputs, when your output frame rate is half
9555	// of your input frame rate. In this situation, choose Optimized interlacing
9556	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
9557	// case, each progressive frame from the input corresponds to an interlaced
9558	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
9559	// for all other output frame rates. With basic interlacing, MediaConvert performs
9560	// any frame rate conversion first and then interlaces the frames. When you
9561	// choose Optimized interlacing and you set your output frame rate to a value
9562	// that isn't suitable for optimized interlacing, MediaConvert automatically
9563	// falls back to basic interlacing. Required settings: To use optimized interlacing,
9564	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
9565	// use optimized interlacing for hard telecine outputs. You must also set Interlace
9566	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
9567	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"H264ScanTypeConversionMode"`
9568
9569	// Enable this setting to insert I-frames at scene changes that the service
9570	// automatically detects. This improves video quality and is enabled by default.
9571	// If this output uses QVBR, choose Transition detection (TRANSITION_DETECTION)
9572	// for further video quality improvement. For more information about QVBR, see
9573	// https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
9574	SceneChangeDetect *string `locationName:"sceneChangeDetect" type:"string" enum:"H264SceneChangeDetect"`
9575
9576	// Number of slices per picture. Must be less than or equal to the number of
9577	// macroblock rows for progressive pictures, and less than or equal to half
9578	// the number of macroblock rows for interlaced pictures.
9579	Slices *int64 `locationName:"slices" min:"1" type:"integer"`
9580
9581	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
9582	// second (fps). Enable slow PAL to create a 25 fps output. When you enable
9583	// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
9584	// your audio to keep it synchronized with the video. Note that enabling this
9585	// setting will slightly reduce the duration of your video. Required settings:
9586	// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
9587	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
9588	// 1.
9589	SlowPal *string `locationName:"slowPal" type:"string" enum:"H264SlowPal"`
9590
9591	// Ignore this setting unless you need to comply with a specification that requires
9592	// a specific value. If you don't have a specification requirement, we recommend
9593	// that you adjust the softness of your output by using a lower value for the
9594	// setting Sharpness (sharpness) or by enabling a noise reducer filter (noiseReducerFilter).
9595	// The Softness (softness) setting specifies the quantization matrices that
9596	// the encoder uses. Keep the default value, 0, for flat quantization. Choose
	// the value 1 or 16 to use the default JVT softening quantization matrices
9598	// from the H.264 specification. Choose a value from 17 to 128 to use planar
9599	// interpolation. Increasing values from 17 to 128 result in increasing reduction
9600	// of high-frequency data. The value 128 results in the softest video.
9601	Softness *int64 `locationName:"softness" type:"integer"`
9602
9603	// Only use this setting when you change the default value, Auto (AUTO), for
9604	// the setting H264AdaptiveQuantization. When you keep all defaults, excluding
9605	// H264AdaptiveQuantization and all other adaptive quantization from your JSON
9606	// job specification, MediaConvert automatically applies the best types of quantization
9607	// for your video content. When you set H264AdaptiveQuantization to a value
9608	// other than AUTO, the default value for H264SpatialAdaptiveQuantization is
9609	// Enabled (ENABLED). Keep this default value to adjust quantization within
9610	// each frame based on spatial variation of content complexity. When you enable
9611	// this feature, the encoder uses fewer bits on areas that can sustain more
9612	// distortion with no noticeable visual degradation and uses more bits on areas
9613	// where any small distortion will be noticeable. For example, complex textured
9614	// blocks are encoded with fewer bits and smooth textured blocks are encoded
9615	// with more bits. Enabling this feature will almost always improve your video
9616	// quality. Note, though, that this feature doesn't take into account where
9617	// the viewer's attention is likely to be. If viewers are likely to be focusing
9618	// their attention on a part of the screen with a lot of complex texture, you
9619	// might choose to set H264SpatialAdaptiveQuantization to Disabled (DISABLED).
9620	// Related setting: When you enable spatial adaptive quantization, set the value
9621	// for Adaptive quantization (H264AdaptiveQuantization) depending on your content.
9622	// For homogeneous content, such as cartoons and video games, set it to Low.
9623	// For content with a wider variety of textures, set it to High or Higher. To
9624	// manually enable or disable H264SpatialAdaptiveQuantization, you must set
9625	// Adaptive quantization (H264AdaptiveQuantization) to a value other than AUTO.
9626	SpatialAdaptiveQuantization *string `locationName:"spatialAdaptiveQuantization" type:"string" enum:"H264SpatialAdaptiveQuantization"`
9627
9628	// Produces a bitstream compliant with SMPTE RP-2027.
9629	Syntax *string `locationName:"syntax" type:"string" enum:"H264Syntax"`
9630
9631	// When you do frame rate conversion from 23.976 frames per second (fps) to
9632	// 29.97 fps, and your output scan type is interlaced, you can optionally enable
9633	// hard or soft telecine to create a smoother picture. Hard telecine (HARD)
	// produces a 29.97i output. Soft telecine (SOFT) produces a 23.976 output that
	// signals to the video player device to do the conversion during playback.
	// When you keep the default value, None (NONE), MediaConvert
9637	// does a standard frame rate conversion to 29.97 without doing anything with
9638	// the field polarity to create a smoother picture.
9639	Telecine *string `locationName:"telecine" type:"string" enum:"H264Telecine"`
9640
9641	// Only use this setting when you change the default value, AUTO, for the setting
9642	// H264AdaptiveQuantization. When you keep all defaults, excluding H264AdaptiveQuantization
9643	// and all other adaptive quantization from your JSON job specification, MediaConvert
9644	// automatically applies the best types of quantization for your video content.
9645	// When you set H264AdaptiveQuantization to a value other than AUTO, the default
9646	// value for H264TemporalAdaptiveQuantization is Enabled (ENABLED). Keep this
9647	// default value to adjust quantization within each frame based on temporal
9648	// variation of content complexity. When you enable this feature, the encoder
9649	// uses fewer bits on areas of the frame that aren't moving and uses more bits
9650	// on complex objects with sharp edges that move a lot. For example, this feature
9651	// improves the readability of text tickers on newscasts and scoreboards on
9652	// sports matches. Enabling this feature will almost always improve your video
9653	// quality. Note, though, that this feature doesn't take into account where
9654	// the viewer's attention is likely to be. If viewers are likely to be focusing
9655	// their attention on a part of the screen that doesn't have moving objects
9656	// with sharp edges, such as sports athletes' faces, you might choose to set
9657	// H264TemporalAdaptiveQuantization to Disabled (DISABLED). Related setting:
9658	// When you enable temporal quantization, adjust the strength of the filter
9659	// with the setting Adaptive quantization (adaptiveQuantization). To manually
9660	// enable or disable H264TemporalAdaptiveQuantization, you must set Adaptive
9661	// quantization (H264AdaptiveQuantization) to a value other than AUTO.
9662	TemporalAdaptiveQuantization *string `locationName:"temporalAdaptiveQuantization" type:"string" enum:"H264TemporalAdaptiveQuantization"`
9663
9664	// Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
9665	UnregisteredSeiTimecode *string `locationName:"unregisteredSeiTimecode" type:"string" enum:"H264UnregisteredSeiTimecode"`
9666}
9667
9668// String returns the string representation
9669func (s H264Settings) String() string {
9670	return awsutil.Prettify(s)
9671}
9672
9673// GoString returns the string representation
9674func (s H264Settings) GoString() string {
9675	return s.String()
9676}
9677
9678// Validate inspects the fields of the type to determine if they are valid.
9679func (s *H264Settings) Validate() error {
9680	invalidParams := request.ErrInvalidParams{Context: "H264Settings"}
9681	if s.Bitrate != nil && *s.Bitrate < 1000 {
9682		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 1000))
9683	}
9684	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
9685		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
9686	}
9687	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
9688		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
9689	}
9690	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
9691		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
9692	}
9693	if s.NumberReferenceFrames != nil && *s.NumberReferenceFrames < 1 {
9694		invalidParams.Add(request.NewErrParamMinValue("NumberReferenceFrames", 1))
9695	}
9696	if s.ParDenominator != nil && *s.ParDenominator < 1 {
9697		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
9698	}
9699	if s.ParNumerator != nil && *s.ParNumerator < 1 {
9700		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
9701	}
9702	if s.Slices != nil && *s.Slices < 1 {
9703		invalidParams.Add(request.NewErrParamMinValue("Slices", 1))
9704	}
9705	if s.QvbrSettings != nil {
9706		if err := s.QvbrSettings.Validate(); err != nil {
9707			invalidParams.AddNested("QvbrSettings", err.(request.ErrInvalidParams))
9708		}
9709	}
9710
9711	if invalidParams.Len() > 0 {
9712		return invalidParams
9713	}
9714	return nil
9715}
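
// exampleValidateH264Settings is an illustrative, hand-written sketch (not part
// of the generated API) showing how a caller might run the client-side Validate
// check above before submitting a job. The low bitrate is chosen deliberately to
// trip the documented 1000 bits/s minimum; all values are assumptions for the
// example, not service defaults.
func exampleValidateH264Settings() {
	s := &H264Settings{}
	s.SetBitrate(500)        // below the 1000 bits/s minimum enforced by Validate
	s.SetMaxBitrate(5000000) // five megabits per second, expressed in bits/s

	if err := s.Validate(); err != nil {
		// The returned error is a request.ErrInvalidParams listing every
		// constraint that failed, such as the minimum Bitrate value.
		fmt.Println(err)
	}
}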
9716
9717// SetAdaptiveQuantization sets the AdaptiveQuantization field's value.
9718func (s *H264Settings) SetAdaptiveQuantization(v string) *H264Settings {
9719	s.AdaptiveQuantization = &v
9720	return s
9721}
9722
9723// SetBitrate sets the Bitrate field's value.
9724func (s *H264Settings) SetBitrate(v int64) *H264Settings {
9725	s.Bitrate = &v
9726	return s
9727}
9728
9729// SetCodecLevel sets the CodecLevel field's value.
9730func (s *H264Settings) SetCodecLevel(v string) *H264Settings {
9731	s.CodecLevel = &v
9732	return s
9733}
9734
9735// SetCodecProfile sets the CodecProfile field's value.
9736func (s *H264Settings) SetCodecProfile(v string) *H264Settings {
9737	s.CodecProfile = &v
9738	return s
9739}
9740
9741// SetDynamicSubGop sets the DynamicSubGop field's value.
9742func (s *H264Settings) SetDynamicSubGop(v string) *H264Settings {
9743	s.DynamicSubGop = &v
9744	return s
9745}
9746
9747// SetEntropyEncoding sets the EntropyEncoding field's value.
9748func (s *H264Settings) SetEntropyEncoding(v string) *H264Settings {
9749	s.EntropyEncoding = &v
9750	return s
9751}
9752
9753// SetFieldEncoding sets the FieldEncoding field's value.
9754func (s *H264Settings) SetFieldEncoding(v string) *H264Settings {
9755	s.FieldEncoding = &v
9756	return s
9757}
9758
9759// SetFlickerAdaptiveQuantization sets the FlickerAdaptiveQuantization field's value.
9760func (s *H264Settings) SetFlickerAdaptiveQuantization(v string) *H264Settings {
9761	s.FlickerAdaptiveQuantization = &v
9762	return s
9763}
9764
9765// SetFramerateControl sets the FramerateControl field's value.
9766func (s *H264Settings) SetFramerateControl(v string) *H264Settings {
9767	s.FramerateControl = &v
9768	return s
9769}
9770
9771// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
9772func (s *H264Settings) SetFramerateConversionAlgorithm(v string) *H264Settings {
9773	s.FramerateConversionAlgorithm = &v
9774	return s
9775}
9776
9777// SetFramerateDenominator sets the FramerateDenominator field's value.
9778func (s *H264Settings) SetFramerateDenominator(v int64) *H264Settings {
9779	s.FramerateDenominator = &v
9780	return s
9781}
9782
9783// SetFramerateNumerator sets the FramerateNumerator field's value.
9784func (s *H264Settings) SetFramerateNumerator(v int64) *H264Settings {
9785	s.FramerateNumerator = &v
9786	return s
9787}
9788
9789// SetGopBReference sets the GopBReference field's value.
9790func (s *H264Settings) SetGopBReference(v string) *H264Settings {
9791	s.GopBReference = &v
9792	return s
9793}
9794
9795// SetGopClosedCadence sets the GopClosedCadence field's value.
9796func (s *H264Settings) SetGopClosedCadence(v int64) *H264Settings {
9797	s.GopClosedCadence = &v
9798	return s
9799}
9800
9801// SetGopSize sets the GopSize field's value.
9802func (s *H264Settings) SetGopSize(v float64) *H264Settings {
9803	s.GopSize = &v
9804	return s
9805}
9806
9807// SetGopSizeUnits sets the GopSizeUnits field's value.
9808func (s *H264Settings) SetGopSizeUnits(v string) *H264Settings {
9809	s.GopSizeUnits = &v
9810	return s
9811}
9812
9813// SetHrdBufferInitialFillPercentage sets the HrdBufferInitialFillPercentage field's value.
9814func (s *H264Settings) SetHrdBufferInitialFillPercentage(v int64) *H264Settings {
9815	s.HrdBufferInitialFillPercentage = &v
9816	return s
9817}
9818
9819// SetHrdBufferSize sets the HrdBufferSize field's value.
9820func (s *H264Settings) SetHrdBufferSize(v int64) *H264Settings {
9821	s.HrdBufferSize = &v
9822	return s
9823}
9824
9825// SetInterlaceMode sets the InterlaceMode field's value.
9826func (s *H264Settings) SetInterlaceMode(v string) *H264Settings {
9827	s.InterlaceMode = &v
9828	return s
9829}
9830
9831// SetMaxBitrate sets the MaxBitrate field's value.
9832func (s *H264Settings) SetMaxBitrate(v int64) *H264Settings {
9833	s.MaxBitrate = &v
9834	return s
9835}
9836
9837// SetMinIInterval sets the MinIInterval field's value.
9838func (s *H264Settings) SetMinIInterval(v int64) *H264Settings {
9839	s.MinIInterval = &v
9840	return s
9841}
9842
9843// SetNumberBFramesBetweenReferenceFrames sets the NumberBFramesBetweenReferenceFrames field's value.
9844func (s *H264Settings) SetNumberBFramesBetweenReferenceFrames(v int64) *H264Settings {
9845	s.NumberBFramesBetweenReferenceFrames = &v
9846	return s
9847}
9848
9849// SetNumberReferenceFrames sets the NumberReferenceFrames field's value.
9850func (s *H264Settings) SetNumberReferenceFrames(v int64) *H264Settings {
9851	s.NumberReferenceFrames = &v
9852	return s
9853}
9854
9855// SetParControl sets the ParControl field's value.
9856func (s *H264Settings) SetParControl(v string) *H264Settings {
9857	s.ParControl = &v
9858	return s
9859}
9860
9861// SetParDenominator sets the ParDenominator field's value.
9862func (s *H264Settings) SetParDenominator(v int64) *H264Settings {
9863	s.ParDenominator = &v
9864	return s
9865}
9866
9867// SetParNumerator sets the ParNumerator field's value.
9868func (s *H264Settings) SetParNumerator(v int64) *H264Settings {
9869	s.ParNumerator = &v
9870	return s
9871}
9872
9873// SetQualityTuningLevel sets the QualityTuningLevel field's value.
9874func (s *H264Settings) SetQualityTuningLevel(v string) *H264Settings {
9875	s.QualityTuningLevel = &v
9876	return s
9877}
9878
9879// SetQvbrSettings sets the QvbrSettings field's value.
9880func (s *H264Settings) SetQvbrSettings(v *H264QvbrSettings) *H264Settings {
9881	s.QvbrSettings = v
9882	return s
9883}
9884
9885// SetRateControlMode sets the RateControlMode field's value.
9886func (s *H264Settings) SetRateControlMode(v string) *H264Settings {
9887	s.RateControlMode = &v
9888	return s
9889}
9890
9891// SetRepeatPps sets the RepeatPps field's value.
9892func (s *H264Settings) SetRepeatPps(v string) *H264Settings {
9893	s.RepeatPps = &v
9894	return s
9895}
9896
9897// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
9898func (s *H264Settings) SetScanTypeConversionMode(v string) *H264Settings {
9899	s.ScanTypeConversionMode = &v
9900	return s
9901}
9902
9903// SetSceneChangeDetect sets the SceneChangeDetect field's value.
9904func (s *H264Settings) SetSceneChangeDetect(v string) *H264Settings {
9905	s.SceneChangeDetect = &v
9906	return s
9907}
9908
9909// SetSlices sets the Slices field's value.
9910func (s *H264Settings) SetSlices(v int64) *H264Settings {
9911	s.Slices = &v
9912	return s
9913}
9914
9915// SetSlowPal sets the SlowPal field's value.
9916func (s *H264Settings) SetSlowPal(v string) *H264Settings {
9917	s.SlowPal = &v
9918	return s
9919}
9920
9921// SetSoftness sets the Softness field's value.
9922func (s *H264Settings) SetSoftness(v int64) *H264Settings {
9923	s.Softness = &v
9924	return s
9925}
9926
9927// SetSpatialAdaptiveQuantization sets the SpatialAdaptiveQuantization field's value.
9928func (s *H264Settings) SetSpatialAdaptiveQuantization(v string) *H264Settings {
9929	s.SpatialAdaptiveQuantization = &v
9930	return s
9931}
9932
9933// SetSyntax sets the Syntax field's value.
9934func (s *H264Settings) SetSyntax(v string) *H264Settings {
9935	s.Syntax = &v
9936	return s
9937}
9938
9939// SetTelecine sets the Telecine field's value.
9940func (s *H264Settings) SetTelecine(v string) *H264Settings {
9941	s.Telecine = &v
9942	return s
9943}
9944
9945// SetTemporalAdaptiveQuantization sets the TemporalAdaptiveQuantization field's value.
9946func (s *H264Settings) SetTemporalAdaptiveQuantization(v string) *H264Settings {
9947	s.TemporalAdaptiveQuantization = &v
9948	return s
9949}
9950
9951// SetUnregisteredSeiTimecode sets the UnregisteredSeiTimecode field's value.
9952func (s *H264Settings) SetUnregisteredSeiTimecode(v string) *H264Settings {
9953	s.UnregisteredSeiTimecode = &v
9954	return s
9955}
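
// exampleH264QvbrOutput is an illustrative, hand-written sketch (not part of the
// generated API) showing how the fluent setters above chain together to describe
// a QVBR H.264 output with an explicit 40:33 pixel aspect ratio. Enum values are
// passed as string literals here; the generated enum constants elsewhere in this
// package can be used instead. The numbers are arbitrary examples, not
// recommendations.
func exampleH264QvbrOutput() *H264Settings {
	return (&H264Settings{}).
		SetRateControlMode("QVBR").
		SetMaxBitrate(5000000). // required when Rate control mode is QVBR
		SetGopSizeUnits("SECONDS").
		SetGopSize(2).
		SetParControl("SPECIFIED").
		SetParNumerator(40). // D1/DV NTSC widescreen PAR of 40:33
		SetParDenominator(33).
		// In practice this carries the QVBR quality level; see the
		// H264QvbrSettings documentation earlier in this file.
		SetQvbrSettings(&H264QvbrSettings{})
}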
9956
9957// Settings for quality-defined variable bitrate encoding with the H.265 codec.
9958// Required when you set Rate control mode to QVBR. Not valid when you set Rate
9959// control mode to a value other than QVBR, or when you don't define Rate control
9960// mode.
9961type H265QvbrSettings struct {
9962	_ struct{} `type:"structure"`
9963
9964	// Use this setting only when Rate control mode is QVBR and Quality tuning level
9965	// is Multi-pass HQ. For Max average bitrate values suited to the complexity
9966	// of your input video, the service limits the average bitrate of the video
9967	// part of this output to the value that you choose. That is, the total size
9968	// of the video element is less than or equal to the value you set multiplied
9969	// by the number of seconds of encoded output.
9970	MaxAverageBitrate *int64 `locationName:"maxAverageBitrate" min:"1000" type:"integer"`
9971
9972	// Required when you use QVBR rate control mode. That is, when you specify qvbrSettings
9973	// within h265Settings. Specify the general target quality level for this output,
9974	// from 1 to 10. Use higher numbers for greater quality. Level 10 results in
9975	// nearly lossless compression. The quality level for most broadcast-quality
9976	// transcodes is between 6 and 9. Optionally, to specify a value between whole
9977	// numbers, also provide a value for the setting qvbrQualityLevelFineTune. For
9978	// example, if you want your QVBR quality level to be 7.33, set qvbrQualityLevel
9979	// to 7 and set qvbrQualityLevelFineTune to .33.
9980	QvbrQualityLevel *int64 `locationName:"qvbrQualityLevel" min:"1" type:"integer"`
9981
9982	// Optional. Specify a value here to set the QVBR quality to a level that is
9983	// between whole numbers. For example, if you want your QVBR quality level to
9984	// be 7.33, set qvbrQualityLevel to 7 and set qvbrQualityLevelFineTune to .33.
9985	// MediaConvert rounds your QVBR quality level to the nearest third of a whole
9986	// number. For example, if you set qvbrQualityLevel to 7 and you set qvbrQualityLevelFineTune
9987	// to .25, your actual QVBR quality level is 7.33.
9988	QvbrQualityLevelFineTune *float64 `locationName:"qvbrQualityLevelFineTune" type:"double"`
9989}
9990
9991// String returns the string representation
9992func (s H265QvbrSettings) String() string {
9993	return awsutil.Prettify(s)
9994}
9995
9996// GoString returns the string representation
9997func (s H265QvbrSettings) GoString() string {
9998	return s.String()
9999}
10000
10001// Validate inspects the fields of the type to determine if they are valid.
10002func (s *H265QvbrSettings) Validate() error {
10003	invalidParams := request.ErrInvalidParams{Context: "H265QvbrSettings"}
10004	if s.MaxAverageBitrate != nil && *s.MaxAverageBitrate < 1000 {
10005		invalidParams.Add(request.NewErrParamMinValue("MaxAverageBitrate", 1000))
10006	}
10007	if s.QvbrQualityLevel != nil && *s.QvbrQualityLevel < 1 {
10008		invalidParams.Add(request.NewErrParamMinValue("QvbrQualityLevel", 1))
10009	}
10010
10011	if invalidParams.Len() > 0 {
10012		return invalidParams
10013	}
10014	return nil
10015}
10016
10017// SetMaxAverageBitrate sets the MaxAverageBitrate field's value.
10018func (s *H265QvbrSettings) SetMaxAverageBitrate(v int64) *H265QvbrSettings {
10019	s.MaxAverageBitrate = &v
10020	return s
10021}
10022
10023// SetQvbrQualityLevel sets the QvbrQualityLevel field's value.
10024func (s *H265QvbrSettings) SetQvbrQualityLevel(v int64) *H265QvbrSettings {
10025	s.QvbrQualityLevel = &v
10026	return s
10027}
10028
10029// SetQvbrQualityLevelFineTune sets the QvbrQualityLevelFineTune field's value.
10030func (s *H265QvbrSettings) SetQvbrQualityLevelFineTune(v float64) *H265QvbrSettings {
10031	s.QvbrQualityLevelFineTune = &v
10032	return s
10033}
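
// exampleH265QvbrQuality733 is an illustrative, hand-written sketch (not part of
// the generated API) of the fractional quality level described above: a QVBR
// quality level of 7.33 is requested by combining qvbrQualityLevel 7 with
// qvbrQualityLevelFineTune .33. The bitrate figure is an arbitrary example.
func exampleH265QvbrQuality733() *H265QvbrSettings {
	q := &H265QvbrSettings{}
	q.SetQvbrQualityLevel(7)
	q.SetQvbrQualityLevelFineTune(0.33)
	q.SetMaxAverageBitrate(3000000) // only honored with multi-pass HQ quality tuning

	if err := q.Validate(); err != nil {
		fmt.Println(err) // reports values below the documented minimums
	}
	return q
}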
10034
10035// Settings for H265 codec
10036type H265Settings struct {
10037	_ struct{} `type:"structure"`
10038
10039	// Specify the strength of any adaptive quantization filters that you enable.
10040	// The value that you choose here applies to the following settings: Flicker
10041	// adaptive quantization (flickerAdaptiveQuantization), Spatial adaptive quantization
10042	// (spatialAdaptiveQuantization), and Temporal adaptive quantization (temporalAdaptiveQuantization).
10043	AdaptiveQuantization *string `locationName:"adaptiveQuantization" type:"string" enum:"H265AdaptiveQuantization"`
10044
10045	// Enables Alternate Transfer Function SEI message for outputs using Hybrid
10046	// Log Gamma (HLG) Electro-Optical Transfer Function (EOTF).
10047	AlternateTransferFunctionSei *string `locationName:"alternateTransferFunctionSei" type:"string" enum:"H265AlternateTransferFunctionSei"`
10048
10049	// Specify the average bitrate in bits per second. Required for VBR and CBR.
10050	// For MS Smooth outputs, bitrates must be unique when rounded down to the nearest
10051	// multiple of 1000.
10052	Bitrate *int64 `locationName:"bitrate" min:"1000" type:"integer"`
10053
10054	// H.265 Level.
10055	CodecLevel *string `locationName:"codecLevel" type:"string" enum:"H265CodecLevel"`
10056
10057	// Represents the Profile and Tier, per the HEVC (H.265) specification. Selections
10058	// are grouped as [Profile] / [Tier], so "Main/High" represents Main Profile
10059	// with High Tier. 4:2:2 profiles are only available with the HEVC 4:2:2 License.
10060	CodecProfile *string `locationName:"codecProfile" type:"string" enum:"H265CodecProfile"`
10061
10062	// Choose Adaptive to improve subjective video quality for high-motion content.
10063	// This will cause the service to use fewer B-frames (which infer information
10064	// based on other frames) for high-motion portions of the video and more B-frames
10065	// for low-motion portions. The maximum number of B-frames is limited by the
10066	// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
10067	DynamicSubGop *string `locationName:"dynamicSubGop" type:"string" enum:"H265DynamicSubGop"`
10068
10069	// Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears
10070	// as a visual flicker that can arise when the encoder saves bits by copying
10071	// some macroblocks many times from frame to frame, and then refreshes them
10072	// at the I-frame. When you enable this setting, the encoder updates these macroblocks
10073	// slightly more often to smooth out the flicker. This setting is disabled by
10074	// default. Related setting: In addition to enabling this setting, you must
10075	// also set adaptiveQuantization to a value other than Off (OFF).
10076	FlickerAdaptiveQuantization *string `locationName:"flickerAdaptiveQuantization" type:"string" enum:"H265FlickerAdaptiveQuantization"`
10077
10078	// If you are using the console, use the Framerate setting to specify the frame
10079	// rate for this output. If you want to keep the same frame rate as the input
10080	// video, choose Follow source. If you want to do frame rate conversion, choose
10081	// a frame rate from the dropdown list or choose Custom. The framerates shown
10082	// in the dropdown list are decimal approximations of fractions. If you choose
10083	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
10084	// job specification as a JSON file without the console, use FramerateControl
10085	// to specify which value the service uses for the frame rate for this output.
10086	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
10087	// from the input. Choose SPECIFIED if you want the service to use the frame
10088	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
10089	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"H265FramerateControl"`
10090
10091	// Choose the method that you want MediaConvert to use when increasing or decreasing
10092	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
10093	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
10094	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
10095	// smooth picture, but might introduce undesirable video artifacts. For complex
10096	// frame rate conversions, especially if your source video has already been
10097	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
10098	// motion-compensated interpolation. FrameFormer chooses the best conversion
10099	// method frame by frame. Note that using FrameFormer increases the transcoding
10100	// time and incurs a significant add-on cost.
10101	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"H265FramerateConversionAlgorithm"`
10102
10103	// When you use the API for transcode jobs that use frame rate conversion, specify
10104	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
10105	// FramerateDenominator to specify the denominator of this fraction. In this
10106	// example, use 1001 for the value of FramerateDenominator. When you use the
10107	// console for transcode jobs that use frame rate conversion, provide the value
10108	// as a decimal number for Framerate. In this example, specify 23.976.
10109	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
10110
10111	// When you use the API for transcode jobs that use frame rate conversion, specify
10112	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
10113	// FramerateNumerator to specify the numerator of this fraction. In this example,
10114	// use 24000 for the value of FramerateNumerator. When you use the console for
10115	// transcode jobs that use frame rate conversion, provide the value as a decimal
10116	// number for Framerate. In this example, specify 23.976.
10117	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
10118
	// If enabled, use reference B frames for GOP structures that have B frames >
	// 1.
10121	GopBReference *string `locationName:"gopBReference" type:"string" enum:"H265GopBReference"`
10122
10123	// Frequency of closed GOPs. In streaming applications, it is recommended that
10124	// this be set to 1 so a decoder joining mid-stream will receive an IDR frame
10125	// as quickly as possible. Setting this value to 0 will break output segmenting.
10126	GopClosedCadence *int64 `locationName:"gopClosedCadence" type:"integer"`
10127
10128	// GOP Length (keyframe interval) in frames or seconds. Must be greater than
10129	// zero.
10130	GopSize *float64 `locationName:"gopSize" type:"double"`
10131
	// Indicates if the GOP Size in H265 is specified in frames or seconds. If seconds,
10133	// the system will convert the GOP Size into a frame count at run time.
10134	GopSizeUnits *string `locationName:"gopSizeUnits" type:"string" enum:"H265GopSizeUnits"`
10135
10136	// Percentage of the buffer that should initially be filled (HRD buffer model).
10137	HrdBufferInitialFillPercentage *int64 `locationName:"hrdBufferInitialFillPercentage" type:"integer"`
10138
10139	// Size of buffer (HRD buffer model) in bits. For example, enter five megabits
10140	// as 5000000.
10141	HrdBufferSize *int64 `locationName:"hrdBufferSize" type:"integer"`
10142
10143	// Choose the scan line type for the output. Keep the default value, Progressive
10144	// (PROGRESSIVE) to create a progressive output, regardless of the scan type
10145	// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
10146	// to create an output that's interlaced with the same field polarity throughout.
10147	// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
10148	// to produce outputs with the same field polarity as the source. For jobs that
10149	// have multiple inputs, the output field polarity might change over the course
10150	// of the output. Follow behavior depends on the input scan type. If the source
10151	// is interlaced, the output will be interlaced with the same polarity as the
10152	// source. If the source is progressive, the output will be interlaced with
	// top field first or bottom field first polarity, depending on which of the
	// Follow options you choose.
10155	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"H265InterlaceMode"`
10156
10157	// Maximum bitrate in bits/second. For example, enter five megabits per second
10158	// as 5000000. Required when Rate control mode is QVBR.
10159	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
10160
10161	// Enforces separation between repeated (cadence) I-frames and I-frames inserted
10162	// by Scene Change Detection. If a scene change I-frame is within I-interval
10163	// frames of a cadence I-frame, the GOP is shrunk and/or stretched to the scene
10164	// change I-frame. GOP stretch requires enabling lookahead as well as setting
10165	// I-interval. The normal cadence resumes for the next GOP. This setting is
10166	// only used when Scene Change Detect is enabled. Note: Maximum GOP stretch
10167	// = GOP size + Min-I-interval - 1
10168	MinIInterval *int64 `locationName:"minIInterval" type:"integer"`
10169
10170	// Number of B-frames between reference frames.
10171	NumberBFramesBetweenReferenceFrames *int64 `locationName:"numberBFramesBetweenReferenceFrames" type:"integer"`
10172
10173	// Number of reference frames to use. The encoder may use more than requested
10174	// if using B-frames and/or interlaced encoding.
10175	NumberReferenceFrames *int64 `locationName:"numberReferenceFrames" min:"1" type:"integer"`
10176
10177	// Optional. Specify how the service determines the pixel aspect ratio (PAR)
10178	// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
10179	// uses the PAR from your input video for your output. To specify a different
10180	// PAR in the console, choose any value other than Follow source. To specify
10181	// a different PAR by editing the JSON job specification, choose SPECIFIED.
10182	// When you choose SPECIFIED for this setting, you must also specify values
10183	// for the parNumerator and parDenominator settings.
10184	ParControl *string `locationName:"parControl" type:"string" enum:"H265ParControl"`
10185
10186	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
10187	// console, this corresponds to any value other than Follow source. When you
10188	// specify an output pixel aspect ratio (PAR) that is different from your input
10189	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
10190	// widescreen, you would specify the ratio 40:33. In this example, the value
10191	// for parDenominator is 33.
10192	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
10193
10194	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
10195	// console, this corresponds to any value other than Follow source. When you
10196	// specify an output pixel aspect ratio (PAR) that is different from your input
10197	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
10198	// widescreen, you would specify the ratio 40:33. In this example, the value
10199	// for parNumerator is 40.
10200	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
10201
10202	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
10203	// want to trade off encoding speed for output video quality. The default behavior
10204	// is faster, lower quality, single-pass encoding.
10205	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"H265QualityTuningLevel"`
10206
10207	// Settings for quality-defined variable bitrate encoding with the H.265 codec.
10208	// Required when you set Rate control mode to QVBR. Not valid when you set Rate
10209	// control mode to a value other than QVBR, or when you don't define Rate control
10210	// mode.
10211	QvbrSettings *H265QvbrSettings `locationName:"qvbrSettings" type:"structure"`
10212
10213	// Use this setting to specify whether this output has a variable bitrate (VBR),
10214	// constant bitrate (CBR) or quality-defined variable bitrate (QVBR).
10215	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"H265RateControlMode"`
10216
10217	// Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode dynamically
	// selects the best strength based on content.
10219	SampleAdaptiveOffsetFilterMode *string `locationName:"sampleAdaptiveOffsetFilterMode" type:"string" enum:"H265SampleAdaptiveOffsetFilterMode"`
10220
10221	// Use this setting for interlaced outputs, when your output frame rate is half
10222	// of your input frame rate. In this situation, choose Optimized interlacing
10223	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
10224	// case, each progressive frame from the input corresponds to an interlaced
10225	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
10226	// for all other output frame rates. With basic interlacing, MediaConvert performs
10227	// any frame rate conversion first and then interlaces the frames. When you
10228	// choose Optimized interlacing and you set your output frame rate to a value
10229	// that isn't suitable for optimized interlacing, MediaConvert automatically
10230	// falls back to basic interlacing. Required settings: To use optimized interlacing,
10231	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
10232	// use optimized interlacing for hard telecine outputs. You must also set Interlace
10233	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
10234	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"H265ScanTypeConversionMode"`
10235
10236	// Enable this setting to insert I-frames at scene changes that the service
10237	// automatically detects. This improves video quality and is enabled by default.
10238	// If this output uses QVBR, choose Transition detection (TRANSITION_DETECTION)
10239	// for further video quality improvement. For more information about QVBR, see
10240	// https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
10241	SceneChangeDetect *string `locationName:"sceneChangeDetect" type:"string" enum:"H265SceneChangeDetect"`
10242
10243	// Number of slices per picture. Must be less than or equal to the number of
10244	// macroblock rows for progressive pictures, and less than or equal to half
10245	// the number of macroblock rows for interlaced pictures.
10246	Slices *int64 `locationName:"slices" min:"1" type:"integer"`
10247
10248	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
10249	// second (fps). Enable slow PAL to create a 25 fps output. When you enable
10250	// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
10251	// your audio to keep it synchronized with the video. Note that enabling this
10252	// setting will slightly reduce the duration of your video. Required settings:
10253	// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
10254	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
10255	// 1.
10256	SlowPal *string `locationName:"slowPal" type:"string" enum:"H265SlowPal"`
10257
10258	// Keep the default value, Enabled (ENABLED), to adjust quantization within
10259	// each frame based on spatial variation of content complexity. When you enable
10260	// this feature, the encoder uses fewer bits on areas that can sustain more
10261	// distortion with no noticeable visual degradation and uses more bits on areas
10262	// where any small distortion will be noticeable. For example, complex textured
10263	// blocks are encoded with fewer bits and smooth textured blocks are encoded
10264	// with more bits. Enabling this feature will almost always improve your video
10265	// quality. Note, though, that this feature doesn't take into account where
10266	// the viewer's attention is likely to be. If viewers are likely to be focusing
10267	// their attention on a part of the screen with a lot of complex texture, you
10268	// might choose to disable this feature. Related setting: When you enable spatial
10269	// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
10270	// depending on your content. For homogeneous content, such as cartoons and
10271	// video games, set it to Low. For content with a wider variety of textures,
10272	// set it to High or Higher.
10273	SpatialAdaptiveQuantization *string `locationName:"spatialAdaptiveQuantization" type:"string" enum:"H265SpatialAdaptiveQuantization"`
10274
10275	// This field applies only if the Streams > Advanced > Framerate (framerate)
10276	// field is set to 29.970. This field works with the Streams > Advanced > Preprocessors
10277	// > Deinterlacer field (deinterlace_mode) and the Streams > Advanced > Interlaced
10278	// Mode field (interlace_mode) to identify the scan type for the output: Progressive,
10279	// Interlaced, Hard Telecine or Soft Telecine. - Hard: produces 29.97i output
10280	// from 23.976 input. - Soft: produces 23.976; the player converts this output
10281	// to 29.97i.
10282	Telecine *string `locationName:"telecine" type:"string" enum:"H265Telecine"`
10283
10284	// Keep the default value, Enabled (ENABLED), to adjust quantization within
10285	// each frame based on temporal variation of content complexity. When you enable
10286	// this feature, the encoder uses fewer bits on areas of the frame that aren't
10287	// moving and uses more bits on complex objects with sharp edges that move a
10288	// lot. For example, this feature improves the readability of text tickers on
10289	// newscasts and scoreboards on sports matches. Enabling this feature will almost
10290	// always improve your video quality. Note, though, that this feature doesn't
10291	// take into account where the viewer's attention is likely to be. If viewers
10292	// are likely to be focusing their attention on a part of the screen that doesn't
10293	// have moving objects with sharp edges, such as sports athletes' faces, you
10294	// might choose to disable this feature. Related setting: When you enable temporal
10295	// quantization, adjust the strength of the filter with the setting Adaptive
10296	// quantization (adaptiveQuantization).
10297	TemporalAdaptiveQuantization *string `locationName:"temporalAdaptiveQuantization" type:"string" enum:"H265TemporalAdaptiveQuantization"`
10298
10299	// Enables temporal layer identifiers in the encoded bitstream. Up to 3 layers
10300	// are supported depending on GOP structure: I- and P-frames form one layer,
	// reference B-frames can form a second layer and non-reference B-frames can
10302	// form a third layer. Decoders can optionally decode only the lower temporal
10303	// layers to generate a lower frame rate output. For example, given a bitstream
10304	// with temporal IDs and with b-frames = 1 (i.e. IbPbPb display order), a decoder
10305	// could decode all the frames for full frame rate output or only the I and
10306	// P frames (lowest temporal layer) for a half frame rate output.
10307	TemporalIds *string `locationName:"temporalIds" type:"string" enum:"H265TemporalIds"`
10308
10309	// Enable use of tiles, allowing horizontal as well as vertical subdivision
10310	// of the encoded pictures.
10311	Tiles *string `locationName:"tiles" type:"string" enum:"H265Tiles"`
10312
10313	// Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
10314	UnregisteredSeiTimecode *string `locationName:"unregisteredSeiTimecode" type:"string" enum:"H265UnregisteredSeiTimecode"`
10315
10316	// If the location of parameter set NAL units doesn't matter in your workflow,
10317	// ignore this setting. Use this setting only with CMAF or DASH outputs, or
10318	// with standalone file outputs in an MPEG-4 container (MP4 outputs). Choose
10319	// HVC1 to mark your output as HVC1. This makes your output compliant with the
10320	// following specification: ISO IECJTC1 SC29 N13798 Text ISO/IEC FDIS 14496-15
10321	// 3rd Edition. For these outputs, the service stores parameter set NAL units
10322	// in the sample headers but not in the samples directly. For MP4 outputs, when
10323	// you choose HVC1, your output video might not work properly with some downstream
10324	// systems and video players. The service defaults to marking your output as
10325	// HEV1. For these outputs, the service writes parameter set NAL units directly
10326	// into the samples.
10327	WriteMp4PackagingType *string `locationName:"writeMp4PackagingType" type:"string" enum:"H265WriteMp4PackagingType"`
10328}
10329
10330// String returns the string representation
10331func (s H265Settings) String() string {
10332	return awsutil.Prettify(s)
10333}
10334
10335// GoString returns the string representation
10336func (s H265Settings) GoString() string {
10337	return s.String()
10338}
10339
10340// Validate inspects the fields of the type to determine if they are valid.
10341func (s *H265Settings) Validate() error {
10342	invalidParams := request.ErrInvalidParams{Context: "H265Settings"}
10343	if s.Bitrate != nil && *s.Bitrate < 1000 {
10344		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 1000))
10345	}
10346	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
10347		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
10348	}
10349	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
10350		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
10351	}
10352	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
10353		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
10354	}
10355	if s.NumberReferenceFrames != nil && *s.NumberReferenceFrames < 1 {
10356		invalidParams.Add(request.NewErrParamMinValue("NumberReferenceFrames", 1))
10357	}
10358	if s.ParDenominator != nil && *s.ParDenominator < 1 {
10359		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
10360	}
10361	if s.ParNumerator != nil && *s.ParNumerator < 1 {
10362		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
10363	}
10364	if s.Slices != nil && *s.Slices < 1 {
10365		invalidParams.Add(request.NewErrParamMinValue("Slices", 1))
10366	}
10367	if s.QvbrSettings != nil {
10368		if err := s.QvbrSettings.Validate(); err != nil {
10369			invalidParams.AddNested("QvbrSettings", err.(request.ErrInvalidParams))
10370		}
10371	}
10372
10373	if invalidParams.Len() > 0 {
10374		return invalidParams
10375	}
10376	return nil
10377}
10378
10379// SetAdaptiveQuantization sets the AdaptiveQuantization field's value.
10380func (s *H265Settings) SetAdaptiveQuantization(v string) *H265Settings {
10381	s.AdaptiveQuantization = &v
10382	return s
10383}
10384
10385// SetAlternateTransferFunctionSei sets the AlternateTransferFunctionSei field's value.
10386func (s *H265Settings) SetAlternateTransferFunctionSei(v string) *H265Settings {
10387	s.AlternateTransferFunctionSei = &v
10388	return s
10389}
10390
10391// SetBitrate sets the Bitrate field's value.
10392func (s *H265Settings) SetBitrate(v int64) *H265Settings {
10393	s.Bitrate = &v
10394	return s
10395}
10396
10397// SetCodecLevel sets the CodecLevel field's value.
10398func (s *H265Settings) SetCodecLevel(v string) *H265Settings {
10399	s.CodecLevel = &v
10400	return s
10401}
10402
10403// SetCodecProfile sets the CodecProfile field's value.
10404func (s *H265Settings) SetCodecProfile(v string) *H265Settings {
10405	s.CodecProfile = &v
10406	return s
10407}
10408
10409// SetDynamicSubGop sets the DynamicSubGop field's value.
10410func (s *H265Settings) SetDynamicSubGop(v string) *H265Settings {
10411	s.DynamicSubGop = &v
10412	return s
10413}
10414
10415// SetFlickerAdaptiveQuantization sets the FlickerAdaptiveQuantization field's value.
10416func (s *H265Settings) SetFlickerAdaptiveQuantization(v string) *H265Settings {
10417	s.FlickerAdaptiveQuantization = &v
10418	return s
10419}
10420
10421// SetFramerateControl sets the FramerateControl field's value.
10422func (s *H265Settings) SetFramerateControl(v string) *H265Settings {
10423	s.FramerateControl = &v
10424	return s
10425}
10426
10427// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
10428func (s *H265Settings) SetFramerateConversionAlgorithm(v string) *H265Settings {
10429	s.FramerateConversionAlgorithm = &v
10430	return s
10431}
10432
10433// SetFramerateDenominator sets the FramerateDenominator field's value.
10434func (s *H265Settings) SetFramerateDenominator(v int64) *H265Settings {
10435	s.FramerateDenominator = &v
10436	return s
10437}
10438
10439// SetFramerateNumerator sets the FramerateNumerator field's value.
10440func (s *H265Settings) SetFramerateNumerator(v int64) *H265Settings {
10441	s.FramerateNumerator = &v
10442	return s
10443}
10444
10445// SetGopBReference sets the GopBReference field's value.
10446func (s *H265Settings) SetGopBReference(v string) *H265Settings {
10447	s.GopBReference = &v
10448	return s
10449}
10450
10451// SetGopClosedCadence sets the GopClosedCadence field's value.
10452func (s *H265Settings) SetGopClosedCadence(v int64) *H265Settings {
10453	s.GopClosedCadence = &v
10454	return s
10455}
10456
10457// SetGopSize sets the GopSize field's value.
10458func (s *H265Settings) SetGopSize(v float64) *H265Settings {
10459	s.GopSize = &v
10460	return s
10461}
10462
10463// SetGopSizeUnits sets the GopSizeUnits field's value.
10464func (s *H265Settings) SetGopSizeUnits(v string) *H265Settings {
10465	s.GopSizeUnits = &v
10466	return s
10467}
10468
10469// SetHrdBufferInitialFillPercentage sets the HrdBufferInitialFillPercentage field's value.
10470func (s *H265Settings) SetHrdBufferInitialFillPercentage(v int64) *H265Settings {
10471	s.HrdBufferInitialFillPercentage = &v
10472	return s
10473}
10474
10475// SetHrdBufferSize sets the HrdBufferSize field's value.
10476func (s *H265Settings) SetHrdBufferSize(v int64) *H265Settings {
10477	s.HrdBufferSize = &v
10478	return s
10479}
10480
10481// SetInterlaceMode sets the InterlaceMode field's value.
10482func (s *H265Settings) SetInterlaceMode(v string) *H265Settings {
10483	s.InterlaceMode = &v
10484	return s
10485}
10486
10487// SetMaxBitrate sets the MaxBitrate field's value.
10488func (s *H265Settings) SetMaxBitrate(v int64) *H265Settings {
10489	s.MaxBitrate = &v
10490	return s
10491}
10492
10493// SetMinIInterval sets the MinIInterval field's value.
10494func (s *H265Settings) SetMinIInterval(v int64) *H265Settings {
10495	s.MinIInterval = &v
10496	return s
10497}
10498
10499// SetNumberBFramesBetweenReferenceFrames sets the NumberBFramesBetweenReferenceFrames field's value.
10500func (s *H265Settings) SetNumberBFramesBetweenReferenceFrames(v int64) *H265Settings {
10501	s.NumberBFramesBetweenReferenceFrames = &v
10502	return s
10503}
10504
10505// SetNumberReferenceFrames sets the NumberReferenceFrames field's value.
10506func (s *H265Settings) SetNumberReferenceFrames(v int64) *H265Settings {
10507	s.NumberReferenceFrames = &v
10508	return s
10509}
10510
10511// SetParControl sets the ParControl field's value.
10512func (s *H265Settings) SetParControl(v string) *H265Settings {
10513	s.ParControl = &v
10514	return s
10515}
10516
10517// SetParDenominator sets the ParDenominator field's value.
10518func (s *H265Settings) SetParDenominator(v int64) *H265Settings {
10519	s.ParDenominator = &v
10520	return s
10521}
10522
10523// SetParNumerator sets the ParNumerator field's value.
10524func (s *H265Settings) SetParNumerator(v int64) *H265Settings {
10525	s.ParNumerator = &v
10526	return s
10527}
10528
10529// SetQualityTuningLevel sets the QualityTuningLevel field's value.
10530func (s *H265Settings) SetQualityTuningLevel(v string) *H265Settings {
10531	s.QualityTuningLevel = &v
10532	return s
10533}
10534
10535// SetQvbrSettings sets the QvbrSettings field's value.
10536func (s *H265Settings) SetQvbrSettings(v *H265QvbrSettings) *H265Settings {
10537	s.QvbrSettings = v
10538	return s
10539}
10540
10541// SetRateControlMode sets the RateControlMode field's value.
10542func (s *H265Settings) SetRateControlMode(v string) *H265Settings {
10543	s.RateControlMode = &v
10544	return s
10545}
10546
10547// SetSampleAdaptiveOffsetFilterMode sets the SampleAdaptiveOffsetFilterMode field's value.
10548func (s *H265Settings) SetSampleAdaptiveOffsetFilterMode(v string) *H265Settings {
10549	s.SampleAdaptiveOffsetFilterMode = &v
10550	return s
10551}
10552
10553// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
10554func (s *H265Settings) SetScanTypeConversionMode(v string) *H265Settings {
10555	s.ScanTypeConversionMode = &v
10556	return s
10557}
10558
10559// SetSceneChangeDetect sets the SceneChangeDetect field's value.
10560func (s *H265Settings) SetSceneChangeDetect(v string) *H265Settings {
10561	s.SceneChangeDetect = &v
10562	return s
10563}
10564
10565// SetSlices sets the Slices field's value.
10566func (s *H265Settings) SetSlices(v int64) *H265Settings {
10567	s.Slices = &v
10568	return s
10569}
10570
10571// SetSlowPal sets the SlowPal field's value.
10572func (s *H265Settings) SetSlowPal(v string) *H265Settings {
10573	s.SlowPal = &v
10574	return s
10575}
10576
10577// SetSpatialAdaptiveQuantization sets the SpatialAdaptiveQuantization field's value.
10578func (s *H265Settings) SetSpatialAdaptiveQuantization(v string) *H265Settings {
10579	s.SpatialAdaptiveQuantization = &v
10580	return s
10581}
10582
10583// SetTelecine sets the Telecine field's value.
10584func (s *H265Settings) SetTelecine(v string) *H265Settings {
10585	s.Telecine = &v
10586	return s
10587}
10588
10589// SetTemporalAdaptiveQuantization sets the TemporalAdaptiveQuantization field's value.
10590func (s *H265Settings) SetTemporalAdaptiveQuantization(v string) *H265Settings {
10591	s.TemporalAdaptiveQuantization = &v
10592	return s
10593}
10594
10595// SetTemporalIds sets the TemporalIds field's value.
10596func (s *H265Settings) SetTemporalIds(v string) *H265Settings {
10597	s.TemporalIds = &v
10598	return s
10599}
10600
10601// SetTiles sets the Tiles field's value.
10602func (s *H265Settings) SetTiles(v string) *H265Settings {
10603	s.Tiles = &v
10604	return s
10605}
10606
10607// SetUnregisteredSeiTimecode sets the UnregisteredSeiTimecode field's value.
10608func (s *H265Settings) SetUnregisteredSeiTimecode(v string) *H265Settings {
10609	s.UnregisteredSeiTimecode = &v
10610	return s
10611}
10612
10613// SetWriteMp4PackagingType sets the WriteMp4PackagingType field's value.
10614func (s *H265Settings) SetWriteMp4PackagingType(v string) *H265Settings {
10615	s.WriteMp4PackagingType = &v
10616	return s
10617}
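
// exampleH265Fractional23976 is an illustrative, hand-written sketch (not part
// of the generated API) showing the fractional frame-rate form described above
// (24000 / 1001 = 23.976 fps) together with QVBR rate control and HVC1 packaging
// for an MP4 output. All values are assumptions for the example.
func exampleH265Fractional23976() *H265Settings {
	return (&H265Settings{}).
		SetFramerateControl("SPECIFIED").
		SetFramerateNumerator(24000).
		SetFramerateDenominator(1001).
		SetRateControlMode("QVBR").
		SetMaxBitrate(8000000).
		SetQvbrSettings(exampleH265QvbrQuality733()).
		SetWriteMp4PackagingType("HVC1")
}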
10618
10619// Use these settings to specify static color calibration metadata, as defined
10620// by SMPTE ST 2086. These values don't affect the pixel values that are encoded
10621// in the video stream. They are intended to help the downstream video player
// display content in a way that reflects the intentions of the content creator.
10624type Hdr10Metadata struct {
10625	_ struct{} `type:"structure"`
10626
10627	// HDR Master Display Information must be provided by a color grader, using
10628	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10629	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10630	BluePrimaryX *int64 `locationName:"bluePrimaryX" type:"integer"`
10631
10632	// HDR Master Display Information must be provided by a color grader, using
10633	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10634	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10635	BluePrimaryY *int64 `locationName:"bluePrimaryY" type:"integer"`
10636
10637	// HDR Master Display Information must be provided by a color grader, using
10638	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10639	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10640	GreenPrimaryX *int64 `locationName:"greenPrimaryX" type:"integer"`
10641
10642	// HDR Master Display Information must be provided by a color grader, using
10643	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10644	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10645	GreenPrimaryY *int64 `locationName:"greenPrimaryY" type:"integer"`
10646
10647	// Maximum light level among all samples in the coded video sequence, in units
10648	// of candelas per square meter. This setting doesn't have a default value;
10649	// you must specify a value that is suitable for the content.
10650	MaxContentLightLevel *int64 `locationName:"maxContentLightLevel" type:"integer"`
10651
10652	// Maximum average light level of any frame in the coded video sequence, in
10653	// units of candelas per square meter. This setting doesn't have a default value;
10654	// you must specify a value that is suitable for the content.
10655	MaxFrameAverageLightLevel *int64 `locationName:"maxFrameAverageLightLevel" type:"integer"`
10656
10657	// Nominal maximum mastering display luminance in units of 0.0001 candelas
10658	// per square meter.
10659	MaxLuminance *int64 `locationName:"maxLuminance" type:"integer"`
10660
10661	// Nominal minimum mastering display luminance in units of 0.0001 candelas
10662	// per square meter.
10663	MinLuminance *int64 `locationName:"minLuminance" type:"integer"`
10664
10665	// HDR Master Display Information must be provided by a color grader, using
10666	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10667	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10668	RedPrimaryX *int64 `locationName:"redPrimaryX" type:"integer"`
10669
10670	// HDR Master Display Information must be provided by a color grader, using
10671	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10672	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10673	RedPrimaryY *int64 `locationName:"redPrimaryY" type:"integer"`
10674
10675	// HDR Master Display Information must be provided by a color grader, using
10676	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10677	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10678	WhitePointX *int64 `locationName:"whitePointX" type:"integer"`
10679
10680	// HDR Master Display Information must be provided by a color grader, using
10681	// color grading tools. Range is 0 to 50,000, each increment represents 0.00002
10682	// in CIE1931 color coordinate. Note that this setting is not for color correction.
10683	WhitePointY *int64 `locationName:"whitePointY" type:"integer"`
10684}
10685
10686// String returns the string representation
10687func (s Hdr10Metadata) String() string {
10688	return awsutil.Prettify(s)
10689}
10690
10691// GoString returns the string representation
10692func (s Hdr10Metadata) GoString() string {
10693	return s.String()
10694}
10695
10696// SetBluePrimaryX sets the BluePrimaryX field's value.
10697func (s *Hdr10Metadata) SetBluePrimaryX(v int64) *Hdr10Metadata {
10698	s.BluePrimaryX = &v
10699	return s
10700}
10701
10702// SetBluePrimaryY sets the BluePrimaryY field's value.
10703func (s *Hdr10Metadata) SetBluePrimaryY(v int64) *Hdr10Metadata {
10704	s.BluePrimaryY = &v
10705	return s
10706}
10707
10708// SetGreenPrimaryX sets the GreenPrimaryX field's value.
10709func (s *Hdr10Metadata) SetGreenPrimaryX(v int64) *Hdr10Metadata {
10710	s.GreenPrimaryX = &v
10711	return s
10712}
10713
10714// SetGreenPrimaryY sets the GreenPrimaryY field's value.
10715func (s *Hdr10Metadata) SetGreenPrimaryY(v int64) *Hdr10Metadata {
10716	s.GreenPrimaryY = &v
10717	return s
10718}
10719
10720// SetMaxContentLightLevel sets the MaxContentLightLevel field's value.
10721func (s *Hdr10Metadata) SetMaxContentLightLevel(v int64) *Hdr10Metadata {
10722	s.MaxContentLightLevel = &v
10723	return s
10724}
10725
10726// SetMaxFrameAverageLightLevel sets the MaxFrameAverageLightLevel field's value.
10727func (s *Hdr10Metadata) SetMaxFrameAverageLightLevel(v int64) *Hdr10Metadata {
10728	s.MaxFrameAverageLightLevel = &v
10729	return s
10730}
10731
10732// SetMaxLuminance sets the MaxLuminance field's value.
10733func (s *Hdr10Metadata) SetMaxLuminance(v int64) *Hdr10Metadata {
10734	s.MaxLuminance = &v
10735	return s
10736}
10737
10738// SetMinLuminance sets the MinLuminance field's value.
10739func (s *Hdr10Metadata) SetMinLuminance(v int64) *Hdr10Metadata {
10740	s.MinLuminance = &v
10741	return s
10742}
10743
10744// SetRedPrimaryX sets the RedPrimaryX field's value.
10745func (s *Hdr10Metadata) SetRedPrimaryX(v int64) *Hdr10Metadata {
10746	s.RedPrimaryX = &v
10747	return s
10748}
10749
10750// SetRedPrimaryY sets the RedPrimaryY field's value.
10751func (s *Hdr10Metadata) SetRedPrimaryY(v int64) *Hdr10Metadata {
10752	s.RedPrimaryY = &v
10753	return s
10754}
10755
10756// SetWhitePointX sets the WhitePointX field's value.
10757func (s *Hdr10Metadata) SetWhitePointX(v int64) *Hdr10Metadata {
10758	s.WhitePointX = &v
10759	return s
10760}
10761
10762// SetWhitePointY sets the WhitePointY field's value.
10763func (s *Hdr10Metadata) SetWhitePointY(v int64) *Hdr10Metadata {
10764	s.WhitePointY = &v
10765	return s
10766}
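
// Example: populating Hdr10Metadata with the chainable setters above. A
// minimal sketch; the coordinate and luminance values are placeholders that a
// color grader would normally supply, not recommended settings.
//
//    hdr10 := &mediaconvert.Hdr10Metadata{}
//    hdr10.SetRedPrimaryX(34000).SetRedPrimaryY(16000).
//        SetGreenPrimaryX(13250).SetGreenPrimaryY(34500).
//        SetBluePrimaryX(7500).SetBluePrimaryY(3000).
//        SetWhitePointX(15635).SetWhitePointY(16450).
//        SetMaxLuminance(10000000).SetMinLuminance(50).
//        SetMaxContentLightLevel(1000).SetMaxFrameAverageLightLevel(400)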
10767
10768// Specify the details for each additional HLS manifest that you want the service
10769// to generate for this output group. Each manifest can reference a different
10770// subset of outputs in the group.
10771type HlsAdditionalManifest struct {
10772	_ struct{} `type:"structure"`
10773
10774	// Specify a name modifier that the service adds to the name of this manifest
10775	// to make it different from the file names of the other main manifests in the
10776	// output group. For example, say that the default main manifest for your HLS
10777	// group is film-name.m3u8. If you enter "-no-premium" for this setting, then
10778	// the file name the service generates for this top-level manifest is film-name-no-premium.m3u8.
10779	// For HLS output groups, specify a manifestNameModifier that is different from
10780	// the nameModifier of the output. The service uses the output name modifier
10781	// to create unique names for the individual variant manifests.
10782	ManifestNameModifier *string `locationName:"manifestNameModifier" min:"1" type:"string"`
10783
10784	// Specify the outputs that you want this additional top-level manifest to reference.
10785	SelectedOutputs []*string `locationName:"selectedOutputs" type:"list"`
10786}
10787
10788// String returns the string representation
10789func (s HlsAdditionalManifest) String() string {
10790	return awsutil.Prettify(s)
10791}
10792
10793// GoString returns the string representation
10794func (s HlsAdditionalManifest) GoString() string {
10795	return s.String()
10796}
10797
10798// Validate inspects the fields of the type to determine if they are valid.
10799func (s *HlsAdditionalManifest) Validate() error {
10800	invalidParams := request.ErrInvalidParams{Context: "HlsAdditionalManifest"}
10801	if s.ManifestNameModifier != nil && len(*s.ManifestNameModifier) < 1 {
10802		invalidParams.Add(request.NewErrParamMinLen("ManifestNameModifier", 1))
10803	}
10804
10805	if invalidParams.Len() > 0 {
10806		return invalidParams
10807	}
10808	return nil
10809}
10810
10811// SetManifestNameModifier sets the ManifestNameModifier field's value.
10812func (s *HlsAdditionalManifest) SetManifestNameModifier(v string) *HlsAdditionalManifest {
10813	s.ManifestNameModifier = &v
10814	return s
10815}
10816
10817// SetSelectedOutputs sets the SelectedOutputs field's value.
10818func (s *HlsAdditionalManifest) SetSelectedOutputs(v []*string) *HlsAdditionalManifest {
10819	s.SelectedOutputs = v
10820	return s
10821}
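
// Example: defining an additional top-level HLS manifest that references a
// subset of outputs. A sketch only; the name modifier and output names are
// placeholders, and aws.StringSlice comes from the aws package imported above.
//
//    extra := &mediaconvert.HlsAdditionalManifest{}
//    extra.SetManifestNameModifier("-no-premium")
//    extra.SetSelectedOutputs(aws.StringSlice([]string{"output-1", "output-2"}))
//    if err := extra.Validate(); err != nil {
//        fmt.Println(err) // reported if ManifestNameModifier is shorter than 1 character
//    }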
10822
10823// Caption Language Mapping
10824type HlsCaptionLanguageMapping struct {
10825	_ struct{} `type:"structure"`
10826
10827	// Caption channel.
10828	CaptionChannel *int64 `locationName:"captionChannel" type:"integer"`
10829
10830	// Specify the language for this captions channel, using the ISO 639-2 or ISO
10831	// 639-3 three-letter language code
10832	CustomLanguageCode *string `locationName:"customLanguageCode" min:"3" type:"string"`
10833
10834	// Specify the language, using the ISO 639-2 three-letter code listed at https://www.loc.gov/standards/iso639-2/php/code_list.php.
10835	LanguageCode *string `locationName:"languageCode" type:"string" enum:"LanguageCode"`
10836
10837	// Caption language description.
10838	LanguageDescription *string `locationName:"languageDescription" type:"string"`
10839}
10840
10841// String returns the string representation
10842func (s HlsCaptionLanguageMapping) String() string {
10843	return awsutil.Prettify(s)
10844}
10845
10846// GoString returns the string representation
10847func (s HlsCaptionLanguageMapping) GoString() string {
10848	return s.String()
10849}
10850
10851// Validate inspects the fields of the type to determine if they are valid.
10852func (s *HlsCaptionLanguageMapping) Validate() error {
10853	invalidParams := request.ErrInvalidParams{Context: "HlsCaptionLanguageMapping"}
10854	if s.CaptionChannel != nil && *s.CaptionChannel < -2.147483648e+09 {
10855		invalidParams.Add(request.NewErrParamMinValue("CaptionChannel", -2.147483648e+09))
10856	}
10857	if s.CustomLanguageCode != nil && len(*s.CustomLanguageCode) < 3 {
10858		invalidParams.Add(request.NewErrParamMinLen("CustomLanguageCode", 3))
10859	}
10860
10861	if invalidParams.Len() > 0 {
10862		return invalidParams
10863	}
10864	return nil
10865}
10866
10867// SetCaptionChannel sets the CaptionChannel field's value.
10868func (s *HlsCaptionLanguageMapping) SetCaptionChannel(v int64) *HlsCaptionLanguageMapping {
10869	s.CaptionChannel = &v
10870	return s
10871}
10872
10873// SetCustomLanguageCode sets the CustomLanguageCode field's value.
10874func (s *HlsCaptionLanguageMapping) SetCustomLanguageCode(v string) *HlsCaptionLanguageMapping {
10875	s.CustomLanguageCode = &v
10876	return s
10877}
10878
10879// SetLanguageCode sets the LanguageCode field's value.
10880func (s *HlsCaptionLanguageMapping) SetLanguageCode(v string) *HlsCaptionLanguageMapping {
10881	s.LanguageCode = &v
10882	return s
10883}
10884
10885// SetLanguageDescription sets the LanguageDescription field's value.
10886func (s *HlsCaptionLanguageMapping) SetLanguageDescription(v string) *HlsCaptionLanguageMapping {
10887	s.LanguageDescription = &v
10888	return s
10889}
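
// Example: mapping caption channel 1 to English in the HLS manifest. A
// minimal sketch; "ENG" is assumed to be a valid LanguageCode enum value.
//
//    mapping := &mediaconvert.HlsCaptionLanguageMapping{}
//    mapping.SetCaptionChannel(1).
//        SetLanguageCode("ENG").
//        SetLanguageDescription("English")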
10890
10891// Settings for HLS encryption
10892type HlsEncryptionSettings struct {
10893	_ struct{} `type:"structure"`
10894
10895	// This is a 128-bit, 16-byte hex value represented by a 32-character text string.
10896	// If this parameter is not set then the Initialization Vector will follow the
10897	// segment number by default.
10898	ConstantInitializationVector *string `locationName:"constantInitializationVector" min:"32" type:"string"`
10899
10900	// Encrypts the segments with the given encryption scheme. Leave blank to disable.
10901	// Selecting 'Disabled' in the web interface also disables encryption.
10902	EncryptionMethod *string `locationName:"encryptionMethod" type:"string" enum:"HlsEncryptionType"`
10903
10904	// The Initialization Vector is a 128-bit number used in conjunction with the
10905	// key for encrypting blocks. If set to INCLUDE, Initialization Vector is listed
10906	// in the manifest. Otherwise Initialization Vector is not in the manifest.
10907	InitializationVectorInManifest *string `locationName:"initializationVectorInManifest" type:"string" enum:"HlsInitializationVectorInManifest"`
10908
10909	// Enable this setting to insert the EXT-X-SESSION-KEY element into the master
10910	// playlist. This allows for offline Apple HLS FairPlay content protection.
10911	OfflineEncrypted *string `locationName:"offlineEncrypted" type:"string" enum:"HlsOfflineEncrypted"`
10912
10913	// If your output group type is HLS, DASH, or Microsoft Smooth, use these settings
10914	// when doing DRM encryption with a SPEKE-compliant key provider. If your output
10915	// group type is CMAF, use the SpekeKeyProviderCmaf settings instead.
10916	SpekeKeyProvider *SpekeKeyProvider `locationName:"spekeKeyProvider" type:"structure"`
10917
10918	// Use these settings to set up encryption with a static key provider.
10919	StaticKeyProvider *StaticKeyProvider `locationName:"staticKeyProvider" type:"structure"`
10920
10921	// Specify whether your DRM encryption key is static or from a key provider
10922	// that follows the SPEKE standard. For more information about SPEKE, see https://docs.aws.amazon.com/speke/latest/documentation/what-is-speke.html.
10923	Type *string `locationName:"type" type:"string" enum:"HlsKeyProviderType"`
10924}
10925
10926// String returns the string representation
10927func (s HlsEncryptionSettings) String() string {
10928	return awsutil.Prettify(s)
10929}
10930
10931// GoString returns the string representation
10932func (s HlsEncryptionSettings) GoString() string {
10933	return s.String()
10934}
10935
10936// Validate inspects the fields of the type to determine if they are valid.
10937func (s *HlsEncryptionSettings) Validate() error {
10938	invalidParams := request.ErrInvalidParams{Context: "HlsEncryptionSettings"}
10939	if s.ConstantInitializationVector != nil && len(*s.ConstantInitializationVector) < 32 {
10940		invalidParams.Add(request.NewErrParamMinLen("ConstantInitializationVector", 32))
10941	}
10942
10943	if invalidParams.Len() > 0 {
10944		return invalidParams
10945	}
10946	return nil
10947}
10948
10949// SetConstantInitializationVector sets the ConstantInitializationVector field's value.
10950func (s *HlsEncryptionSettings) SetConstantInitializationVector(v string) *HlsEncryptionSettings {
10951	s.ConstantInitializationVector = &v
10952	return s
10953}
10954
10955// SetEncryptionMethod sets the EncryptionMethod field's value.
10956func (s *HlsEncryptionSettings) SetEncryptionMethod(v string) *HlsEncryptionSettings {
10957	s.EncryptionMethod = &v
10958	return s
10959}
10960
10961// SetInitializationVectorInManifest sets the InitializationVectorInManifest field's value.
10962func (s *HlsEncryptionSettings) SetInitializationVectorInManifest(v string) *HlsEncryptionSettings {
10963	s.InitializationVectorInManifest = &v
10964	return s
10965}
10966
10967// SetOfflineEncrypted sets the OfflineEncrypted field's value.
10968func (s *HlsEncryptionSettings) SetOfflineEncrypted(v string) *HlsEncryptionSettings {
10969	s.OfflineEncrypted = &v
10970	return s
10971}
10972
10973// SetSpekeKeyProvider sets the SpekeKeyProvider field's value.
10974func (s *HlsEncryptionSettings) SetSpekeKeyProvider(v *SpekeKeyProvider) *HlsEncryptionSettings {
10975	s.SpekeKeyProvider = v
10976	return s
10977}
10978
10979// SetStaticKeyProvider sets the StaticKeyProvider field's value.
10980func (s *HlsEncryptionSettings) SetStaticKeyProvider(v *StaticKeyProvider) *HlsEncryptionSettings {
10981	s.StaticKeyProvider = v
10982	return s
10983}
10984
10985// SetType sets the Type field's value.
10986func (s *HlsEncryptionSettings) SetType(v string) *HlsEncryptionSettings {
10987	s.Type = &v
10988	return s
10989}
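
// Example: enabling SPEKE-based DRM for an HLS output group. A sketch under
// the assumption that the string values match the HlsEncryptionType,
// HlsInitializationVectorInManifest, and HlsKeyProviderType enums; the key
// provider details would be filled in separately.
//
//    enc := &mediaconvert.HlsEncryptionSettings{}
//    enc.SetEncryptionMethod("SAMPLE_AES").
//        SetInitializationVectorInManifest("INCLUDE").
//        SetType("SPEKE")
//    enc.SetSpekeKeyProvider(&mediaconvert.SpekeKeyProvider{}) // configure the key provider separately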
10990
10991// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
10992// HLS_GROUP_SETTINGS.
10993type HlsGroupSettings struct {
10994	_ struct{} `type:"structure"`
10995
10996	// Choose one or more ad marker types to decorate your Apple HLS manifest. This
10997	// setting does not determine whether SCTE-35 markers appear in the outputs
10998	// themselves.
10999	AdMarkers []*string `locationName:"adMarkers" type:"list"`
11000
11001	// By default, the service creates one top-level .m3u8 HLS manifest for each
11002	// HLS output group in your job. This default manifest references every output
11003	// in the output group. To create additional top-level manifests that reference
11004	// a subset of the outputs in the output group, specify a list of them here.
11005	AdditionalManifests []*HlsAdditionalManifest `locationName:"additionalManifests" type:"list"`
11006
11007	// Ignore this setting unless you are using FairPlay DRM with Verimatrix and
11008	// you encounter playback issues. Keep the default value, Include (INCLUDE),
11009	// to output audio-only headers. Choose Exclude (EXCLUDE) to remove the audio-only
11010	// headers from your audio segments.
11011	AudioOnlyHeader *string `locationName:"audioOnlyHeader" type:"string" enum:"HlsAudioOnlyHeader"`
11012
11013	// A partial URI prefix that will be prepended to each output in the media .m3u8
11014	// file. Can be used if the base manifest is delivered from a different URL than
11015	// the main .m3u8 file.
11016	BaseUrl *string `locationName:"baseUrl" type:"string"`
11017
11018	// Language to be used on Caption outputs
11019	CaptionLanguageMappings []*HlsCaptionLanguageMapping `locationName:"captionLanguageMappings" type:"list"`
11020
11021	// Applies only to 608 Embedded output captions. Insert: Include CLOSED-CAPTIONS
11022	// lines in the manifest. Specify at least one language in the CC1 Language
11023	// Code field. One CLOSED-CAPTION line is added for each Language Code you specify.
11024	// Make sure to specify the languages in the order in which they appear in the
11025	// original source (if the source is embedded format) or the order of the caption
11026	// selectors (if the source is other than embedded). Otherwise, languages in
11027	// the manifest will not match up properly with the output captions. None: Include
11028	// CLOSED-CAPTIONS=NONE line in the manifest. Omit: Omit any CLOSED-CAPTIONS
11029	// line from the manifest.
11030	CaptionLanguageSetting *string `locationName:"captionLanguageSetting" type:"string" enum:"HlsCaptionLanguageSetting"`
11031
11032	// Disable this setting only when your workflow requires the #EXT-X-ALLOW-CACHE:no
11033	// tag. Otherwise, keep the default value Enabled (ENABLED) and control caching
11034	// in your video distribution set up. For example, use the Cache-Control http
11035	// header.
11036	ClientCache *string `locationName:"clientCache" type:"string" enum:"HlsClientCache"`
11037
11038	// Specification to use (RFC-6381 or the default RFC-4281) during m3u8 playlist
11039	// generation.
11040	CodecSpecification *string `locationName:"codecSpecification" type:"string" enum:"HlsCodecSpecification"`
11041
11042	// Use Destination (Destination) to specify the S3 output location and the output
11043	// filename base. Destination accepts format identifiers. If you do not specify
11044	// the base filename in the URI, the service will use the filename of the input
11045	// file. If your job has multiple inputs, the service uses the filename of the
11046	// first input file.
11047	Destination *string `locationName:"destination" type:"string"`
11048
11049	// Settings associated with the destination. Will vary based on the type of
11050	// destination.
11051	DestinationSettings *DestinationSettings `locationName:"destinationSettings" type:"structure"`
11052
11053	// Indicates whether segments should be placed in subdirectories.
11054	DirectoryStructure *string `locationName:"directoryStructure" type:"string" enum:"HlsDirectoryStructure"`
11055
11056	// DRM settings.
11057	Encryption *HlsEncryptionSettings `locationName:"encryption" type:"structure"`
11058
11059	// When set to GZIP, compresses HLS playlist.
11060	ManifestCompression *string `locationName:"manifestCompression" type:"string" enum:"HlsManifestCompression"`
11061
11062	// Indicates whether the output manifest should use floating point values for
11063	// segment duration.
11064	ManifestDurationFormat *string `locationName:"manifestDurationFormat" type:"string" enum:"HlsManifestDurationFormat"`
11065
11066	// Keep this setting at the default value of 0, unless you are troubleshooting
11067	// a problem with how devices play back the end of your video asset. If you
11068	// know that player devices are hanging on the final segment of your video because
11069	// the length of your final segment is too short, use this setting to specify
11070	// a minimum final segment length, in seconds. Choose a value that is greater
11071	// than or equal to 1 and less than your segment length. When you specify a
11072	// value for this setting, the encoder will combine any final segment that is
11073	// shorter than the length that you specify with the previous segment. For example,
11074	// your segment length is 3 seconds and your final segment is .5 seconds without
11075	// a minimum final segment length; when you set the minimum final segment length
11076	// to 1, your final segment is 3.5 seconds.
11077	MinFinalSegmentLength *float64 `locationName:"minFinalSegmentLength" type:"double"`
11078
11079	// When set, Minimum Segment Size is enforced by looking ahead and back within
11080	// the specified range for a nearby avail and extending the segment size if
11081	// needed.
11082	MinSegmentLength *int64 `locationName:"minSegmentLength" type:"integer"`
11083
11084	// Indicates whether the .m3u8 manifest file should be generated for this HLS
11085	// output group.
11086	OutputSelection *string `locationName:"outputSelection" type:"string" enum:"HlsOutputSelection"`
11087
11088	// Includes or excludes EXT-X-PROGRAM-DATE-TIME tag in .m3u8 manifest files.
11089	// The value is calculated as follows: either the program date and time are
11090	// initialized using the input timecode source, or the time is initialized using
11091	// the input timecode source and the date is initialized using the timestamp_offset.
11092	ProgramDateTime *string `locationName:"programDateTime" type:"string" enum:"HlsProgramDateTime"`
11093
11094	// Period of insertion of EXT-X-PROGRAM-DATE-TIME entry, in seconds.
11095	ProgramDateTimePeriod *int64 `locationName:"programDateTimePeriod" type:"integer"`
11096
11097	// When set to SINGLE_FILE, emits the program as a single media resource (.ts)
11098	// file and uses #EXT-X-BYTERANGE tags to index segments for playback.
11099	SegmentControl *string `locationName:"segmentControl" type:"string" enum:"HlsSegmentControl"`
11100
11101	// Length of MPEG-2 Transport Stream segments to create (in seconds). Note that
11102	// segments will end on the next keyframe after this number of seconds, so actual
11103	// segment length may be longer.
11104	SegmentLength *int64 `locationName:"segmentLength" min:"1" type:"integer"`
11105
11106	// Number of segments to write to a subdirectory before starting a new one.
11107	// directoryStructure must be SINGLE_DIRECTORY for this setting to have an effect.
11108	SegmentsPerSubdirectory *int64 `locationName:"segmentsPerSubdirectory" min:"1" type:"integer"`
11109
11110	// Include or exclude RESOLUTION attribute for video in EXT-X-STREAM-INF tag
11111	// of variant manifest.
11112	StreamInfResolution *string `locationName:"streamInfResolution" type:"string" enum:"HlsStreamInfResolution"`
11113
11114	// Indicates the ID3 frame that has the timecode.
11115	TimedMetadataId3Frame *string `locationName:"timedMetadataId3Frame" type:"string" enum:"HlsTimedMetadataId3Frame"`
11116
11117	// Timed Metadata interval in seconds.
11118	TimedMetadataId3Period *int64 `locationName:"timedMetadataId3Period" type:"integer"`
11119
11120	// Provides an extra millisecond delta offset to fine tune the timestamps.
11121	TimestampDeltaMilliseconds *int64 `locationName:"timestampDeltaMilliseconds" type:"integer"`
11122}
11123
11124// String returns the string representation
11125func (s HlsGroupSettings) String() string {
11126	return awsutil.Prettify(s)
11127}
11128
11129// GoString returns the string representation
11130func (s HlsGroupSettings) GoString() string {
11131	return s.String()
11132}
11133
11134// Validate inspects the fields of the type to determine if they are valid.
11135func (s *HlsGroupSettings) Validate() error {
11136	invalidParams := request.ErrInvalidParams{Context: "HlsGroupSettings"}
11137	if s.SegmentLength != nil && *s.SegmentLength < 1 {
11138		invalidParams.Add(request.NewErrParamMinValue("SegmentLength", 1))
11139	}
11140	if s.SegmentsPerSubdirectory != nil && *s.SegmentsPerSubdirectory < 1 {
11141		invalidParams.Add(request.NewErrParamMinValue("SegmentsPerSubdirectory", 1))
11142	}
11143	if s.TimedMetadataId3Period != nil && *s.TimedMetadataId3Period < -2.147483648e+09 {
11144		invalidParams.Add(request.NewErrParamMinValue("TimedMetadataId3Period", -2.147483648e+09))
11145	}
11146	if s.TimestampDeltaMilliseconds != nil && *s.TimestampDeltaMilliseconds < -2.147483648e+09 {
11147		invalidParams.Add(request.NewErrParamMinValue("TimestampDeltaMilliseconds", -2.147483648e+09))
11148	}
11149	if s.AdditionalManifests != nil {
11150		for i, v := range s.AdditionalManifests {
11151			if v == nil {
11152				continue
11153			}
11154			if err := v.Validate(); err != nil {
11155				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AdditionalManifests", i), err.(request.ErrInvalidParams))
11156			}
11157		}
11158	}
11159	if s.CaptionLanguageMappings != nil {
11160		for i, v := range s.CaptionLanguageMappings {
11161			if v == nil {
11162				continue
11163			}
11164			if err := v.Validate(); err != nil {
11165				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionLanguageMappings", i), err.(request.ErrInvalidParams))
11166			}
11167		}
11168	}
11169	if s.Encryption != nil {
11170		if err := s.Encryption.Validate(); err != nil {
11171			invalidParams.AddNested("Encryption", err.(request.ErrInvalidParams))
11172		}
11173	}
11174
11175	if invalidParams.Len() > 0 {
11176		return invalidParams
11177	}
11178	return nil
11179}
11180
11181// SetAdMarkers sets the AdMarkers field's value.
11182func (s *HlsGroupSettings) SetAdMarkers(v []*string) *HlsGroupSettings {
11183	s.AdMarkers = v
11184	return s
11185}
11186
11187// SetAdditionalManifests sets the AdditionalManifests field's value.
11188func (s *HlsGroupSettings) SetAdditionalManifests(v []*HlsAdditionalManifest) *HlsGroupSettings {
11189	s.AdditionalManifests = v
11190	return s
11191}
11192
11193// SetAudioOnlyHeader sets the AudioOnlyHeader field's value.
11194func (s *HlsGroupSettings) SetAudioOnlyHeader(v string) *HlsGroupSettings {
11195	s.AudioOnlyHeader = &v
11196	return s
11197}
11198
11199// SetBaseUrl sets the BaseUrl field's value.
11200func (s *HlsGroupSettings) SetBaseUrl(v string) *HlsGroupSettings {
11201	s.BaseUrl = &v
11202	return s
11203}
11204
11205// SetCaptionLanguageMappings sets the CaptionLanguageMappings field's value.
11206func (s *HlsGroupSettings) SetCaptionLanguageMappings(v []*HlsCaptionLanguageMapping) *HlsGroupSettings {
11207	s.CaptionLanguageMappings = v
11208	return s
11209}
11210
11211// SetCaptionLanguageSetting sets the CaptionLanguageSetting field's value.
11212func (s *HlsGroupSettings) SetCaptionLanguageSetting(v string) *HlsGroupSettings {
11213	s.CaptionLanguageSetting = &v
11214	return s
11215}
11216
11217// SetClientCache sets the ClientCache field's value.
11218func (s *HlsGroupSettings) SetClientCache(v string) *HlsGroupSettings {
11219	s.ClientCache = &v
11220	return s
11221}
11222
11223// SetCodecSpecification sets the CodecSpecification field's value.
11224func (s *HlsGroupSettings) SetCodecSpecification(v string) *HlsGroupSettings {
11225	s.CodecSpecification = &v
11226	return s
11227}
11228
11229// SetDestination sets the Destination field's value.
11230func (s *HlsGroupSettings) SetDestination(v string) *HlsGroupSettings {
11231	s.Destination = &v
11232	return s
11233}
11234
11235// SetDestinationSettings sets the DestinationSettings field's value.
11236func (s *HlsGroupSettings) SetDestinationSettings(v *DestinationSettings) *HlsGroupSettings {
11237	s.DestinationSettings = v
11238	return s
11239}
11240
11241// SetDirectoryStructure sets the DirectoryStructure field's value.
11242func (s *HlsGroupSettings) SetDirectoryStructure(v string) *HlsGroupSettings {
11243	s.DirectoryStructure = &v
11244	return s
11245}
11246
11247// SetEncryption sets the Encryption field's value.
11248func (s *HlsGroupSettings) SetEncryption(v *HlsEncryptionSettings) *HlsGroupSettings {
11249	s.Encryption = v
11250	return s
11251}
11252
11253// SetManifestCompression sets the ManifestCompression field's value.
11254func (s *HlsGroupSettings) SetManifestCompression(v string) *HlsGroupSettings {
11255	s.ManifestCompression = &v
11256	return s
11257}
11258
11259// SetManifestDurationFormat sets the ManifestDurationFormat field's value.
11260func (s *HlsGroupSettings) SetManifestDurationFormat(v string) *HlsGroupSettings {
11261	s.ManifestDurationFormat = &v
11262	return s
11263}
11264
11265// SetMinFinalSegmentLength sets the MinFinalSegmentLength field's value.
11266func (s *HlsGroupSettings) SetMinFinalSegmentLength(v float64) *HlsGroupSettings {
11267	s.MinFinalSegmentLength = &v
11268	return s
11269}
11270
11271// SetMinSegmentLength sets the MinSegmentLength field's value.
11272func (s *HlsGroupSettings) SetMinSegmentLength(v int64) *HlsGroupSettings {
11273	s.MinSegmentLength = &v
11274	return s
11275}
11276
11277// SetOutputSelection sets the OutputSelection field's value.
11278func (s *HlsGroupSettings) SetOutputSelection(v string) *HlsGroupSettings {
11279	s.OutputSelection = &v
11280	return s
11281}
11282
11283// SetProgramDateTime sets the ProgramDateTime field's value.
11284func (s *HlsGroupSettings) SetProgramDateTime(v string) *HlsGroupSettings {
11285	s.ProgramDateTime = &v
11286	return s
11287}
11288
11289// SetProgramDateTimePeriod sets the ProgramDateTimePeriod field's value.
11290func (s *HlsGroupSettings) SetProgramDateTimePeriod(v int64) *HlsGroupSettings {
11291	s.ProgramDateTimePeriod = &v
11292	return s
11293}
11294
11295// SetSegmentControl sets the SegmentControl field's value.
11296func (s *HlsGroupSettings) SetSegmentControl(v string) *HlsGroupSettings {
11297	s.SegmentControl = &v
11298	return s
11299}
11300
11301// SetSegmentLength sets the SegmentLength field's value.
11302func (s *HlsGroupSettings) SetSegmentLength(v int64) *HlsGroupSettings {
11303	s.SegmentLength = &v
11304	return s
11305}
11306
11307// SetSegmentsPerSubdirectory sets the SegmentsPerSubdirectory field's value.
11308func (s *HlsGroupSettings) SetSegmentsPerSubdirectory(v int64) *HlsGroupSettings {
11309	s.SegmentsPerSubdirectory = &v
11310	return s
11311}
11312
11313// SetStreamInfResolution sets the StreamInfResolution field's value.
11314func (s *HlsGroupSettings) SetStreamInfResolution(v string) *HlsGroupSettings {
11315	s.StreamInfResolution = &v
11316	return s
11317}
11318
11319// SetTimedMetadataId3Frame sets the TimedMetadataId3Frame field's value.
11320func (s *HlsGroupSettings) SetTimedMetadataId3Frame(v string) *HlsGroupSettings {
11321	s.TimedMetadataId3Frame = &v
11322	return s
11323}
11324
11325// SetTimedMetadataId3Period sets the TimedMetadataId3Period field's value.
11326func (s *HlsGroupSettings) SetTimedMetadataId3Period(v int64) *HlsGroupSettings {
11327	s.TimedMetadataId3Period = &v
11328	return s
11329}
11330
11331// SetTimestampDeltaMilliseconds sets the TimestampDeltaMilliseconds field's value.
11332func (s *HlsGroupSettings) SetTimestampDeltaMilliseconds(v int64) *HlsGroupSettings {
11333	s.TimestampDeltaMilliseconds = &v
11334	return s
11335}
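
// Example: a minimal HLS output group configuration built with the setters
// above. The S3 destination, enum strings, and segment length are illustrative
// assumptions, not recommendations.
//
//    hlsGroup := &mediaconvert.HlsGroupSettings{}
//    hlsGroup.SetDestination("s3://my-bucket/hls/").
//        SetSegmentControl("SEGMENTED_FILES").
//        SetSegmentLength(6).
//        SetManifestDurationFormat("FLOATING_POINT").
//        SetProgramDateTime("EXCLUDE")
//    if err := hlsGroup.Validate(); err != nil {
//        fmt.Println(err) // e.g. SegmentLength or SegmentsPerSubdirectory below the minimum of 1
//    }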
11336
11337// Settings for HLS output groups
11338type HlsSettings struct {
11339	_ struct{} `type:"structure"`
11340
11341	// Specifies the group to which the audio Rendition belongs.
11342	AudioGroupId *string `locationName:"audioGroupId" type:"string"`
11343
11344	// Use this setting only in audio-only outputs. Choose MPEG-2 Transport Stream
11345	// (M2TS) to create a file in an MPEG2-TS container. Keep the default value
11346	// Automatic (AUTOMATIC) to create an audio-only file in a raw container. Regardless
11347	// of the value that you specify here, if this output has video, the service
11348	// will place the output into an MPEG2-TS container.
11349	AudioOnlyContainer *string `locationName:"audioOnlyContainer" type:"string" enum:"HlsAudioOnlyContainer"`
11350
11351	// List all the audio groups that are used with the video output stream. Input
11352	// all the audio GROUP-IDs that are associated with the video, separated by ','.
11353	AudioRenditionSets *string `locationName:"audioRenditionSets" type:"string"`
11354
11355	// Four types of audio-only tracks are supported: Audio-Only Variant Stream
11356	// The client can play back this audio-only stream instead of video in low-bandwidth
11357	// scenarios. Represented as an EXT-X-STREAM-INF in the HLS manifest. Alternate
11358	// Audio, Auto Select, Default Alternate rendition that the client should try
11359	// to play back by default. Represented as an EXT-X-MEDIA in the HLS manifest
11360	// with DEFAULT=YES, AUTOSELECT=YES Alternate Audio, Auto Select, Not Default
11361	// Alternate rendition that the client may try to play back by default. Represented
11362	// as an EXT-X-MEDIA in the HLS manifest with DEFAULT=NO, AUTOSELECT=YES Alternate
11363	// Audio, not Auto Select Alternate rendition that the client will not try to
11364	// play back by default. Represented as an EXT-X-MEDIA in the HLS manifest with
11365	// DEFAULT=NO, AUTOSELECT=NO
11366	AudioTrackType *string `locationName:"audioTrackType" type:"string" enum:"HlsAudioTrackType"`
11367
11368	// Choose Include (INCLUDE) to have MediaConvert generate a child manifest that
11369	// lists only the I-frames for this rendition, in addition to your regular manifest
11370	// for this rendition. You might use this manifest as part of a workflow that
11371	// creates preview functions for your video. MediaConvert adds both the I-frame
11372	// only child manifest and the regular child manifest to the parent manifest.
11373	// When you don't need the I-frame only child manifest, keep the default value
11374	// Exclude (EXCLUDE).
11375	IFrameOnlyManifest *string `locationName:"iFrameOnlyManifest" type:"string" enum:"HlsIFrameOnlyManifest"`
11376
11377	// Use this setting to add an identifying string to the filename of each segment.
11378	// The service adds this string between the name modifier and segment index
11379	// number. You can use format identifiers in the string. For more information,
11380	// see https://docs.aws.amazon.com/mediaconvert/latest/ug/using-variables-in-your-job-settings.html
11381	SegmentModifier *string `locationName:"segmentModifier" type:"string"`
11382}
11383
11384// String returns the string representation
11385func (s HlsSettings) String() string {
11386	return awsutil.Prettify(s)
11387}
11388
11389// GoString returns the string representation
11390func (s HlsSettings) GoString() string {
11391	return s.String()
11392}
11393
11394// SetAudioGroupId sets the AudioGroupId field's value.
11395func (s *HlsSettings) SetAudioGroupId(v string) *HlsSettings {
11396	s.AudioGroupId = &v
11397	return s
11398}
11399
11400// SetAudioOnlyContainer sets the AudioOnlyContainer field's value.
11401func (s *HlsSettings) SetAudioOnlyContainer(v string) *HlsSettings {
11402	s.AudioOnlyContainer = &v
11403	return s
11404}
11405
11406// SetAudioRenditionSets sets the AudioRenditionSets field's value.
11407func (s *HlsSettings) SetAudioRenditionSets(v string) *HlsSettings {
11408	s.AudioRenditionSets = &v
11409	return s
11410}
11411
11412// SetAudioTrackType sets the AudioTrackType field's value.
11413func (s *HlsSettings) SetAudioTrackType(v string) *HlsSettings {
11414	s.AudioTrackType = &v
11415	return s
11416}
11417
11418// SetIFrameOnlyManifest sets the IFrameOnlyManifest field's value.
11419func (s *HlsSettings) SetIFrameOnlyManifest(v string) *HlsSettings {
11420	s.IFrameOnlyManifest = &v
11421	return s
11422}
11423
11424// SetSegmentModifier sets the SegmentModifier field's value.
11425func (s *HlsSettings) SetSegmentModifier(v string) *HlsSettings {
11426	s.SegmentModifier = &v
11427	return s
11428}
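
// Example: associating a video rendition with an audio group in HlsSettings.
// A sketch only; the group IDs are placeholders and the AudioTrackType and
// IFrameOnlyManifest strings are assumed to match their respective enums.
//
//    hls := &mediaconvert.HlsSettings{}
//    hls.SetAudioGroupId("program_audio").
//        SetAudioRenditionSets("program_audio").
//        SetAudioTrackType("ALTERNATE_AUDIO_AUTO_SELECT_DEFAULT").
//        SetIFrameOnlyManifest("EXCLUDE")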
11429
11430// Optional. Configuration for a destination queue to which the job can hop
11431// once a customer-defined minimum wait time has passed.
11432type HopDestination struct {
11433	_ struct{} `type:"structure"`
11434
11435	// Optional. When you set up a job to use queue hopping, you can specify a different
11436	// relative priority for the job in the destination queue. If you don't specify,
11437	// the relative priority will remain the same as in the previous queue.
11438	Priority *int64 `locationName:"priority" type:"integer"`
11439
11440	// Optional unless the job is submitted on the default queue. When you set up
11441	// a job to use queue hopping, you can specify a destination queue. This queue
11442	// cannot be the original queue to which the job is submitted. If the original
11443	// queue isn't the default queue and you don't specify the destination queue,
11444	// the job will move to the default queue.
11445	Queue *string `locationName:"queue" type:"string"`
11446
11447	// Required for setting up a job to use queue hopping. Minimum wait time in
11448	// minutes until the job can hop to the destination queue. Valid range is 1
11449	// to 1440 minutes, inclusive.
11450	WaitMinutes *int64 `locationName:"waitMinutes" type:"integer"`
11451}
11452
11453// String returns the string representation
11454func (s HopDestination) String() string {
11455	return awsutil.Prettify(s)
11456}
11457
11458// GoString returns the string representation
11459func (s HopDestination) GoString() string {
11460	return s.String()
11461}
11462
11463// Validate inspects the fields of the type to determine if they are valid.
11464func (s *HopDestination) Validate() error {
11465	invalidParams := request.ErrInvalidParams{Context: "HopDestination"}
11466	if s.Priority != nil && *s.Priority < -50 {
11467		invalidParams.Add(request.NewErrParamMinValue("Priority", -50))
11468	}
11469
11470	if invalidParams.Len() > 0 {
11471		return invalidParams
11472	}
11473	return nil
11474}
11475
11476// SetPriority sets the Priority field's value.
11477func (s *HopDestination) SetPriority(v int64) *HopDestination {
11478	s.Priority = &v
11479	return s
11480}
11481
11482// SetQueue sets the Queue field's value.
11483func (s *HopDestination) SetQueue(v string) *HopDestination {
11484	s.Queue = &v
11485	return s
11486}
11487
11488// SetWaitMinutes sets the WaitMinutes field's value.
11489func (s *HopDestination) SetWaitMinutes(v int64) *HopDestination {
11490	s.WaitMinutes = &v
11491	return s
11492}
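
// Example: allowing a job to hop to another queue after 30 minutes. A minimal
// sketch; the queue ARN is a placeholder.
//
//    hop := &mediaconvert.HopDestination{}
//    hop.SetQueue("arn:aws:mediaconvert:us-east-1:123456789012:queues/Backup").
//        SetPriority(0).
//        SetWaitMinutes(30)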
11493
11494// To insert ID3 tags in your output, specify two values. Use ID3 tag (Id3)
11495// to specify the base 64 encoded string and use Timecode (TimeCode) to specify
11496// the time when the tag should be inserted. To insert multiple ID3 tags in
11497// your output, create multiple instances of ID3 insertion (Id3Insertion).
11498type Id3Insertion struct {
11499	_ struct{} `type:"structure"`
11500
11501	// Use ID3 tag (Id3) to provide a tag value in base64-encode format.
11502	Id3 *string `locationName:"id3" type:"string"`
11503
11504	// Provide a Timecode (TimeCode) in HH:MM:SS:FF or HH:MM:SS;FF format.
11505	Timecode *string `locationName:"timecode" type:"string"`
11506}
11507
11508// String returns the string representation
11509func (s Id3Insertion) String() string {
11510	return awsutil.Prettify(s)
11511}
11512
11513// GoString returns the string representation
11514func (s Id3Insertion) GoString() string {
11515	return s.String()
11516}
11517
11518// SetId3 sets the Id3 field's value.
11519func (s *Id3Insertion) SetId3(v string) *Id3Insertion {
11520	s.Id3 = &v
11521	return s
11522}
11523
11524// SetTimecode sets the Timecode field's value.
11525func (s *Id3Insertion) SetTimecode(v string) *Id3Insertion {
11526	s.Timecode = &v
11527	return s
11528}
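
// Example: inserting one ID3 tag ten seconds into the output. A sketch; the
// base64 payload is a placeholder and the Timecode uses HH:MM:SS:FF format.
//
//    id3 := &mediaconvert.Id3Insertion{}
//    id3.SetId3("SUQzBAAAAAAA...").SetTimecode("00:00:10:00")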
11529
11530// Enable the image inserter feature to include a graphic overlay on your video.
11531// Enable or disable this feature for each input or output individually. This
11532// setting is disabled by default.
11533type ImageInserter struct {
11534	_ struct{} `type:"structure"`
11535
11536	// Specify the images that you want to overlay on your video. The images must
11537	// be PNG or TGA files.
11538	InsertableImages []*InsertableImage `locationName:"insertableImages" type:"list"`
11539}
11540
11541// String returns the string representation
11542func (s ImageInserter) String() string {
11543	return awsutil.Prettify(s)
11544}
11545
11546// GoString returns the string representation
11547func (s ImageInserter) GoString() string {
11548	return s.String()
11549}
11550
11551// Validate inspects the fields of the type to determine if they are valid.
11552func (s *ImageInserter) Validate() error {
11553	invalidParams := request.ErrInvalidParams{Context: "ImageInserter"}
11554	if s.InsertableImages != nil {
11555		for i, v := range s.InsertableImages {
11556			if v == nil {
11557				continue
11558			}
11559			if err := v.Validate(); err != nil {
11560				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "InsertableImages", i), err.(request.ErrInvalidParams))
11561			}
11562		}
11563	}
11564
11565	if invalidParams.Len() > 0 {
11566		return invalidParams
11567	}
11568	return nil
11569}
11570
11571// SetInsertableImages sets the InsertableImages field's value.
11572func (s *ImageInserter) SetInsertableImages(v []*InsertableImage) *ImageInserter {
11573	s.InsertableImages = v
11574	return s
11575}
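
// Example: overlaying a single PNG on the video. A sketch that assumes the
// InsertableImage setters defined elsewhere in this package (for example
// SetImageInserterInput and SetLayer); the S3 path is a placeholder.
//
//    inserter := &mediaconvert.ImageInserter{}
//    logo := &mediaconvert.InsertableImage{}
//    logo.SetImageInserterInput("s3://my-bucket/overlays/logo.png").SetLayer(1)
//    inserter.SetInsertableImages([]*mediaconvert.InsertableImage{logo})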
11576
11577// Settings specific to IMSC caption outputs.
11578type ImscDestinationSettings struct {
11579	_ struct{} `type:"structure"`
11580
11581	// Keep this setting enabled to have MediaConvert use the font style and position
11582	// information from the captions source in the output. This option is available
11583	// only when your input captions are IMSC, SMPTE-TT, or TTML. Disable this setting
11584	// for simplified output captions.
11585	StylePassthrough *string `locationName:"stylePassthrough" type:"string" enum:"ImscStylePassthrough"`
11586}
11587
11588// String returns the string representation
11589func (s ImscDestinationSettings) String() string {
11590	return awsutil.Prettify(s)
11591}
11592
11593// GoString returns the string representation
11594func (s ImscDestinationSettings) GoString() string {
11595	return s.String()
11596}
11597
11598// SetStylePassthrough sets the StylePassthrough field's value.
11599func (s *ImscDestinationSettings) SetStylePassthrough(v string) *ImscDestinationSettings {
11600	s.StylePassthrough = &v
11601	return s
11602}
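
// Example: preserving source caption styling in an IMSC output. A one-line
// sketch; "ENABLED" is assumed to match the ImscStylePassthrough enum.
//
//    imsc := &mediaconvert.ImscDestinationSettings{}
//    imsc.SetStylePassthrough("ENABLED")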
11603
11604// Specifies media input
11605type Input struct {
11606	_ struct{} `type:"structure"`
11607
11608	// Specifies set of audio selectors within an input to combine. An input may
11609	// have multiple audio selector groups. See "Audio Selector Group":#inputs-audio_selector_group
11610	// for more information.
11611	AudioSelectorGroups map[string]*AudioSelectorGroup `locationName:"audioSelectorGroups" type:"map"`
11612
11613	// Use Audio selectors (AudioSelectors) to specify a track or set of tracks
11614	// from the input that you will use in your outputs. You can use multiple Audio
11615	// selectors per input.
11616	AudioSelectors map[string]*AudioSelector `locationName:"audioSelectors" type:"map"`
11617
11618	// Use captions selectors to specify the captions data from your input that
11619	// you use in your outputs. You can use up to 20 captions selectors per input.
11620	CaptionSelectors map[string]*CaptionSelector `locationName:"captionSelectors" type:"map"`
11621
11622	// Use Cropping selection (crop) to specify the video area that the service
11623	// will include in the output video frame. If you specify a value here, it will
11624	// override any value that you specify in the output setting Cropping selection
11625	// (crop).
11626	Crop *Rectangle `locationName:"crop" type:"structure"`
11627
11628	// Enable Deblock (InputDeblockFilter) to produce smoother motion in the output.
11629	// Default is disabled. Only manually controllable for MPEG2 and uncompressed
11630	// video inputs.
11631	DeblockFilter *string `locationName:"deblockFilter" type:"string" enum:"InputDeblockFilter"`
11632
11633	// Settings for decrypting any input files that you encrypt before you upload
11634	// them to Amazon S3. MediaConvert can decrypt files only when you use AWS Key
11635	// Management Service (KMS) to encrypt the data key that you use to encrypt
11636	// your content.
11637	DecryptionSettings *InputDecryptionSettings `locationName:"decryptionSettings" type:"structure"`
11638
11639	// Enable Denoise (InputDenoiseFilter) to filter noise from the input. Default
11640	// is disabled. Only applicable to MPEG2, H.264, H.265, and uncompressed video
11641	// inputs.
11642	DenoiseFilter *string `locationName:"denoiseFilter" type:"string" enum:"InputDenoiseFilter"`
11643
11644	// Specify the source file for your transcoding job. You can use multiple inputs
11645	// in a single job. The service concatenates these inputs, in the order that
11646	// you specify them in the job, to create the outputs. If your input format
11647	// is IMF, specify your input by providing the path to your CPL. For example,
11648	// "s3://bucket/vf/cpl.xml". If the CPL is in an incomplete IMP, make sure to
11649	// use *Supplemental IMPs* (SupplementalImps) to specify any supplemental IMPs
11650	// that contain assets referenced by the CPL.
11651	FileInput *string `locationName:"fileInput" type:"string"`
11652
11653	// Specify how the transcoding service applies the denoise and deblock filters.
11654	// You must also enable the filters separately, with Denoise (InputDenoiseFilter)
11655	// and Deblock (InputDeblockFilter). * Auto - The transcoding service determines
11656	// whether to apply filtering, depending on input type and quality. * Disable
11657	// - The input is not filtered. This is true even if you use the API to enable
11658	// them in (InputDeblockFilter) and (InputDenoiseFilter). * Force - The input
11659	// is filtered regardless of input type.
11660	FilterEnable *string `locationName:"filterEnable" type:"string" enum:"InputFilterEnable"`
11661
11662	// Use Filter strength (FilterStrength) to adjust the magnitude of the input filter
11663	// settings (Deblock and Denoise). The range is -5 to 5. Default is 0.
11664	FilterStrength *int64 `locationName:"filterStrength" type:"integer"`
11665
11666	// Enable the image inserter feature to include a graphic overlay on your video.
11667	// Enable or disable this feature for each input individually. This setting
11668	// is disabled by default.
11669	ImageInserter *ImageInserter `locationName:"imageInserter" type:"structure"`
11670
11671	// (InputClippings) contains sets of start and end times that together specify
11672	// a portion of the input to be used in the outputs. If you provide only a start
11673	// time, the clip will be the entire input from that point to the end. If you
11674	// provide only an end time, it will be the entire input up to that point. When
11675	// you specify more than one input clip, the transcoding service creates the
11676	// job outputs by stringing the clips together in the order you specify them.
11677	InputClippings []*InputClipping `locationName:"inputClippings" type:"list"`
11678
11679	// When you have a progressive segmented frame (PsF) input, use this setting
11680	// to flag the input as PsF. MediaConvert doesn't automatically detect PsF.
11681	// Therefore, flagging your input as PsF results in better preservation of video
11682	// quality when you do deinterlacing and frame rate conversion. If you don't
11683	// specify, the default value is Auto (AUTO). Auto is the correct setting for
11684	// all inputs that are not PsF. Don't set this value to PsF when your input
11685	// is interlaced. Doing so creates horizontal interlacing artifacts.
11686	InputScanType *string `locationName:"inputScanType" type:"string" enum:"InputScanType"`
11687
11688	// Use Selection placement (position) to define the video area in your output
11689	// frame. The area outside of the rectangle that you specify here is black.
11690	// If you specify a value here, it will override any value that you specify
11691	// in the output setting Selection placement (position). If you specify a value
11692	// here, this will override any AFD values in your input, even if you set Respond
11693	// to AFD (RespondToAfd) to Respond (RESPOND). If you specify a value here,
11694	// this will ignore anything that you specify for the setting Scaling Behavior
11695	// (scalingBehavior).
11696	Position *Rectangle `locationName:"position" type:"structure"`
11697
11698	// Use Program (programNumber) to select a specific program from within a multi-program
11699	// transport stream. Note that Quad 4K is not currently supported. Default is
11700	// the first program within the transport stream. If the program you specify
11701	// doesn't exist, the transcoding service will use this default.
11702	ProgramNumber *int64 `locationName:"programNumber" min:"1" type:"integer"`
11703
11704	// Set PSI control (InputPsiControl) for transport stream inputs to specify
11705	// which data the demux process scans. * Ignore PSI - Scan all PIDs for audio
11706	// and video. * Use PSI - Scan only PSI data.
11707	PsiControl *string `locationName:"psiControl" type:"string" enum:"InputPsiControl"`
11708
11709	// Provide a list of any necessary supplemental IMPs. You need supplemental
11710	// IMPs if the CPL that you're using for your input is in an incomplete IMP.
11711	// Specify either the supplemental IMP directories with a trailing slash or
11712	// the ASSETMAP.xml files. For example ["s3://bucket/ov/", "s3://bucket/vf2/ASSETMAP.xml"].
11713	// You don't need to specify the IMP that contains your input CPL, because the
11714	// service automatically detects it.
11715	SupplementalImps []*string `locationName:"supplementalImps" type:"list"`
11716
11717	// Use this Timecode source setting, located under the input settings (InputTimecodeSource),
11718	// to specify how the service counts input video frames. This input frame count
11719	// affects only the behavior of features that apply to a single input at a time,
11720	// such as input clipping and synchronizing some captions formats. Choose Embedded
11721	// (EMBEDDED) to use the timecodes in your input video. Choose Start at zero
11722	// (ZEROBASED) to start the first frame at zero. Choose Specified start (SPECIFIEDSTART)
11723	// to start the first frame at the timecode that you specify in the setting
11724	// Start timecode (timecodeStart). If you don't specify a value for Timecode
11725	// source, the service will use Embedded by default. For more information about
11726	// timecodes, see https://docs.aws.amazon.com/console/mediaconvert/timecode.
11727	TimecodeSource *string `locationName:"timecodeSource" type:"string" enum:"InputTimecodeSource"`
11728
11729	// Specify the timecode that you want the service to use for this input's initial
11730	// frame. To use this setting, you must set the Timecode source setting, located
11731	// under the input settings (InputTimecodeSource), to Specified start (SPECIFIEDSTART).
11732	// For more information about timecodes, see https://docs.aws.amazon.com/console/mediaconvert/timecode.
11733	TimecodeStart *string `locationName:"timecodeStart" min:"11" type:"string"`
11734
11735	// Selector for video.
11736	VideoSelector *VideoSelector `locationName:"videoSelector" type:"structure"`
11737}
11738
11739// String returns the string representation
11740func (s Input) String() string {
11741	return awsutil.Prettify(s)
11742}
11743
11744// GoString returns the string representation
11745func (s Input) GoString() string {
11746	return s.String()
11747}
11748
11749// Validate inspects the fields of the type to determine if they are valid.
11750func (s *Input) Validate() error {
11751	invalidParams := request.ErrInvalidParams{Context: "Input"}
11752	if s.FilterStrength != nil && *s.FilterStrength < -5 {
11753		invalidParams.Add(request.NewErrParamMinValue("FilterStrength", -5))
11754	}
11755	if s.ProgramNumber != nil && *s.ProgramNumber < 1 {
11756		invalidParams.Add(request.NewErrParamMinValue("ProgramNumber", 1))
11757	}
11758	if s.TimecodeStart != nil && len(*s.TimecodeStart) < 11 {
11759		invalidParams.Add(request.NewErrParamMinLen("TimecodeStart", 11))
11760	}
11761	if s.AudioSelectors != nil {
11762		for i, v := range s.AudioSelectors {
11763			if v == nil {
11764				continue
11765			}
11766			if err := v.Validate(); err != nil {
11767				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AudioSelectors", i), err.(request.ErrInvalidParams))
11768			}
11769		}
11770	}
11771	if s.CaptionSelectors != nil {
11772		for i, v := range s.CaptionSelectors {
11773			if v == nil {
11774				continue
11775			}
11776			if err := v.Validate(); err != nil {
11777				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionSelectors", i), err.(request.ErrInvalidParams))
11778			}
11779		}
11780	}
11781	if s.Crop != nil {
11782		if err := s.Crop.Validate(); err != nil {
11783			invalidParams.AddNested("Crop", err.(request.ErrInvalidParams))
11784		}
11785	}
11786	if s.DecryptionSettings != nil {
11787		if err := s.DecryptionSettings.Validate(); err != nil {
11788			invalidParams.AddNested("DecryptionSettings", err.(request.ErrInvalidParams))
11789		}
11790	}
11791	if s.ImageInserter != nil {
11792		if err := s.ImageInserter.Validate(); err != nil {
11793			invalidParams.AddNested("ImageInserter", err.(request.ErrInvalidParams))
11794		}
11795	}
11796	if s.Position != nil {
11797		if err := s.Position.Validate(); err != nil {
11798			invalidParams.AddNested("Position", err.(request.ErrInvalidParams))
11799		}
11800	}
11801	if s.VideoSelector != nil {
11802		if err := s.VideoSelector.Validate(); err != nil {
11803			invalidParams.AddNested("VideoSelector", err.(request.ErrInvalidParams))
11804		}
11805	}
11806
11807	if invalidParams.Len() > 0 {
11808		return invalidParams
11809	}
11810	return nil
11811}
11812
11813// SetAudioSelectorGroups sets the AudioSelectorGroups field's value.
11814func (s *Input) SetAudioSelectorGroups(v map[string]*AudioSelectorGroup) *Input {
11815	s.AudioSelectorGroups = v
11816	return s
11817}
11818
11819// SetAudioSelectors sets the AudioSelectors field's value.
11820func (s *Input) SetAudioSelectors(v map[string]*AudioSelector) *Input {
11821	s.AudioSelectors = v
11822	return s
11823}
11824
11825// SetCaptionSelectors sets the CaptionSelectors field's value.
11826func (s *Input) SetCaptionSelectors(v map[string]*CaptionSelector) *Input {
11827	s.CaptionSelectors = v
11828	return s
11829}
11830
11831// SetCrop sets the Crop field's value.
11832func (s *Input) SetCrop(v *Rectangle) *Input {
11833	s.Crop = v
11834	return s
11835}
11836
11837// SetDeblockFilter sets the DeblockFilter field's value.
11838func (s *Input) SetDeblockFilter(v string) *Input {
11839	s.DeblockFilter = &v
11840	return s
11841}
11842
11843// SetDecryptionSettings sets the DecryptionSettings field's value.
11844func (s *Input) SetDecryptionSettings(v *InputDecryptionSettings) *Input {
11845	s.DecryptionSettings = v
11846	return s
11847}
11848
11849// SetDenoiseFilter sets the DenoiseFilter field's value.
11850func (s *Input) SetDenoiseFilter(v string) *Input {
11851	s.DenoiseFilter = &v
11852	return s
11853}
11854
11855// SetFileInput sets the FileInput field's value.
11856func (s *Input) SetFileInput(v string) *Input {
11857	s.FileInput = &v
11858	return s
11859}
11860
11861// SetFilterEnable sets the FilterEnable field's value.
11862func (s *Input) SetFilterEnable(v string) *Input {
11863	s.FilterEnable = &v
11864	return s
11865}
11866
11867// SetFilterStrength sets the FilterStrength field's value.
11868func (s *Input) SetFilterStrength(v int64) *Input {
11869	s.FilterStrength = &v
11870	return s
11871}
11872
11873// SetImageInserter sets the ImageInserter field's value.
11874func (s *Input) SetImageInserter(v *ImageInserter) *Input {
11875	s.ImageInserter = v
11876	return s
11877}
11878
11879// SetInputClippings sets the InputClippings field's value.
11880func (s *Input) SetInputClippings(v []*InputClipping) *Input {
11881	s.InputClippings = v
11882	return s
11883}
11884
11885// SetInputScanType sets the InputScanType field's value.
11886func (s *Input) SetInputScanType(v string) *Input {
11887	s.InputScanType = &v
11888	return s
11889}
11890
11891// SetPosition sets the Position field's value.
11892func (s *Input) SetPosition(v *Rectangle) *Input {
11893	s.Position = v
11894	return s
11895}
11896
11897// SetProgramNumber sets the ProgramNumber field's value.
11898func (s *Input) SetProgramNumber(v int64) *Input {
11899	s.ProgramNumber = &v
11900	return s
11901}
11902
11903// SetPsiControl sets the PsiControl field's value.
11904func (s *Input) SetPsiControl(v string) *Input {
11905	s.PsiControl = &v
11906	return s
11907}
11908
11909// SetSupplementalImps sets the SupplementalImps field's value.
11910func (s *Input) SetSupplementalImps(v []*string) *Input {
11911	s.SupplementalImps = v
11912	return s
11913}
11914
11915// SetTimecodeSource sets the TimecodeSource field's value.
11916func (s *Input) SetTimecodeSource(v string) *Input {
11917	s.TimecodeSource = &v
11918	return s
11919}
11920
11921// SetTimecodeStart sets the TimecodeStart field's value.
11922func (s *Input) SetTimecodeStart(v string) *Input {
11923	s.TimecodeStart = &v
11924	return s
11925}
11926
11927// SetVideoSelector sets the VideoSelector field's value.
11928func (s *Input) SetVideoSelector(v *VideoSelector) *Input {
11929	s.VideoSelector = v
11930	return s
11931}
11932
11933// To transcode only portions of your input (clips), include one Input clipping
11934// (one instance of InputClipping in the JSON job file) for each input clip.
11935// All input clips you specify will be included in every output of the job.
11936type InputClipping struct {
11937	_ struct{} `type:"structure"`
11938
11939	// Set End timecode (EndTimecode) to the end of the portion of the input you
11940	// are clipping. The frame corresponding to the End timecode value is included
11941	// in the clip. Start timecode or End timecode may be left blank, but not both.
11942	// Use the format HH:MM:SS:FF or HH:MM:SS;FF, where HH is the hour, MM is the
11943	// minute, SS is the second, and FF is the frame number. When choosing this
11944	// value, take into account your setting for timecode source under input settings
11945	// (InputTimecodeSource). For example, if you have embedded timecodes that start
11946	// at 01:00:00:00 and you want your clip to end six minutes into the video,
11947	// use 01:06:00:00.
11948	EndTimecode *string `locationName:"endTimecode" type:"string"`
11949
11950	// Set Start timecode (StartTimecode) to the beginning of the portion of the
11951	// input you are clipping. The frame corresponding to the Start timecode value
11952	// is included in the clip. Start timecode or End timecode may be left blank,
11953	// but not both. Use the format HH:MM:SS:FF or HH:MM:SS;FF, where HH is the
11954	// hour, MM is the minute, SS is the second, and FF is the frame number. When
11955	// choosing this value, take into account your setting for Input timecode source.
11956	// For example, if you have embedded timecodes that start at 01:00:00:00 and
11957	// you want your clip to begin five minutes into the video, use 01:05:00:00.
11958	StartTimecode *string `locationName:"startTimecode" type:"string"`
11959}
11960
11961// String returns the string representation
11962func (s InputClipping) String() string {
11963	return awsutil.Prettify(s)
11964}
11965
11966// GoString returns the string representation
11967func (s InputClipping) GoString() string {
11968	return s.String()
11969}
11970
11971// SetEndTimecode sets the EndTimecode field's value.
11972func (s *InputClipping) SetEndTimecode(v string) *InputClipping {
11973	s.EndTimecode = &v
11974	return s
11975}
11976
11977// SetStartTimecode sets the StartTimecode field's value.
11978func (s *InputClipping) SetStartTimecode(v string) *InputClipping {
11979	s.StartTimecode = &v
11980	return s
11981}
11982
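// Illustrative sketch (not part of the generated API): the InputClipping setters
// above can be chained to keep a one-minute portion of an input that has embedded
// timecodes starting at 01:00:00:00. The timecode values are hypothetical
// placeholders; adjust them to match your Timecode source setting.
//
//    clip := (&InputClipping{}).
//        SetStartTimecode("01:05:00:00").
//        SetEndTimecode("01:06:00:00")
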
11983// Settings for decrypting any input files that you encrypt before you upload
11984// them to Amazon S3. MediaConvert can decrypt files only when you use AWS Key
11985// Management Service (KMS) to encrypt the data key that you use to encrypt
11986// your content.
11987type InputDecryptionSettings struct {
11988	_ struct{} `type:"structure"`
11989
11990	// Specify the encryption mode that you used to encrypt your input files.
11991	DecryptionMode *string `locationName:"decryptionMode" type:"string" enum:"DecryptionMode"`
11992
11993	// Warning! Don't provide your encryption key in plaintext. Your job settings
11994	// could be intercepted, making your encrypted content vulnerable. Specify the
11995	// encrypted version of the data key that you used to encrypt your content.
11996	// The data key must be encrypted by AWS Key Management Service (KMS). The key
11997	// can be 128, 192, or 256 bits.
11998	EncryptedDecryptionKey *string `locationName:"encryptedDecryptionKey" min:"24" type:"string"`
11999
12000	// Specify the initialization vector that you used when you encrypted your content
12001	// before uploading it to Amazon S3. You can use a 16-byte initialization vector
12002	// with any encryption mode. Or, you can use a 12-byte initialization vector
12003	// with GCM or CTR. MediaConvert accepts only initialization vectors that are
12004	// base64-encoded.
12005	InitializationVector *string `locationName:"initializationVector" min:"16" type:"string"`
12006
12007	// Specify the AWS Region for AWS Key Management Service (KMS) that you used
12008	// to encrypt your data key, if that Region is different from the one you are
12009	// using for AWS Elemental MediaConvert.
12010	KmsKeyRegion *string `locationName:"kmsKeyRegion" min:"9" type:"string"`
12011}
12012
12013// String returns the string representation
12014func (s InputDecryptionSettings) String() string {
12015	return awsutil.Prettify(s)
12016}
12017
12018// GoString returns the string representation
12019func (s InputDecryptionSettings) GoString() string {
12020	return s.String()
12021}
12022
12023// Validate inspects the fields of the type to determine if they are valid.
12024func (s *InputDecryptionSettings) Validate() error {
12025	invalidParams := request.ErrInvalidParams{Context: "InputDecryptionSettings"}
12026	if s.EncryptedDecryptionKey != nil && len(*s.EncryptedDecryptionKey) < 24 {
12027		invalidParams.Add(request.NewErrParamMinLen("EncryptedDecryptionKey", 24))
12028	}
12029	if s.InitializationVector != nil && len(*s.InitializationVector) < 16 {
12030		invalidParams.Add(request.NewErrParamMinLen("InitializationVector", 16))
12031	}
12032	if s.KmsKeyRegion != nil && len(*s.KmsKeyRegion) < 9 {
12033		invalidParams.Add(request.NewErrParamMinLen("KmsKeyRegion", 9))
12034	}
12035
12036	if invalidParams.Len() > 0 {
12037		return invalidParams
12038	}
12039	return nil
12040}
12041
12042// SetDecryptionMode sets the DecryptionMode field's value.
12043func (s *InputDecryptionSettings) SetDecryptionMode(v string) *InputDecryptionSettings {
12044	s.DecryptionMode = &v
12045	return s
12046}
12047
12048// SetEncryptedDecryptionKey sets the EncryptedDecryptionKey field's value.
12049func (s *InputDecryptionSettings) SetEncryptedDecryptionKey(v string) *InputDecryptionSettings {
12050	s.EncryptedDecryptionKey = &v
12051	return s
12052}
12053
12054// SetInitializationVector sets the InitializationVector field's value.
12055func (s *InputDecryptionSettings) SetInitializationVector(v string) *InputDecryptionSettings {
12056	s.InitializationVector = &v
12057	return s
12058}
12059
12060// SetKmsKeyRegion sets the KmsKeyRegion field's value.
12061func (s *InputDecryptionSettings) SetKmsKeyRegion(v string) *InputDecryptionSettings {
12062	s.KmsKeyRegion = &v
12063	return s
12064}
12065
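// Illustrative sketch (not part of the generated API): assembling decryption
// settings for content that was encrypted client-side before upload. The key,
// initialization vector, and mode values below are hypothetical placeholders;
// check the DecryptionMode enum in this package for the exact value your
// content requires.
//
//    decrypt := (&InputDecryptionSettings{}).
//        SetDecryptionMode("AES_CTR").
//        SetEncryptedDecryptionKey("AAECAwQFBgcICQoLDA0ODw==").
//        SetInitializationVector("AAAAAAAAAAAAAAAAAAAAAA==").
//        SetKmsKeyRegion("us-west-2")
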
// Specifies a video input in a job template.
12067type InputTemplate struct {
12068	_ struct{} `type:"structure"`
12069
	// Specifies a set of audio selectors within an input to combine. An input may
	// have multiple audio selector groups. See the Audio Selector Group documentation
	// for more information.
12073	AudioSelectorGroups map[string]*AudioSelectorGroup `locationName:"audioSelectorGroups" type:"map"`
12074
12075	// Use Audio selectors (AudioSelectors) to specify a track or set of tracks
12076	// from the input that you will use in your outputs. You can use multiple Audio
12077	// selectors per input.
12078	AudioSelectors map[string]*AudioSelector `locationName:"audioSelectors" type:"map"`
12079
12080	// Use captions selectors to specify the captions data from your input that
12081	// you use in your outputs. You can use up to 20 captions selectors per input.
12082	CaptionSelectors map[string]*CaptionSelector `locationName:"captionSelectors" type:"map"`
12083
12084	// Use Cropping selection (crop) to specify the video area that the service
12085	// will include in the output video frame. If you specify a value here, it will
12086	// override any value that you specify in the output setting Cropping selection
12087	// (crop).
12088	Crop *Rectangle `locationName:"crop" type:"structure"`
12089
12090	// Enable Deblock (InputDeblockFilter) to produce smoother motion in the output.
12091	// Default is disabled. Only manually controllable for MPEG2 and uncompressed
12092	// video inputs.
12093	DeblockFilter *string `locationName:"deblockFilter" type:"string" enum:"InputDeblockFilter"`
12094
12095	// Enable Denoise (InputDenoiseFilter) to filter noise from the input. Default
12096	// is disabled. Only applicable to MPEG2, H.264, H.265, and uncompressed video
12097	// inputs.
12098	DenoiseFilter *string `locationName:"denoiseFilter" type:"string" enum:"InputDenoiseFilter"`
12099
12100	// Specify how the transcoding service applies the denoise and deblock filters.
12101	// You must also enable the filters separately, with Denoise (InputDenoiseFilter)
12102	// and Deblock (InputDeblockFilter). * Auto - The transcoding service determines
12103	// whether to apply filtering, depending on input type and quality. * Disable
	// - The input is not filtered. This is true even if you use the API to enable
	// them in (InputDeblockFilter) and (InputDenoiseFilter). * Force - The input
12106	// is filtered regardless of input type.
12107	FilterEnable *string `locationName:"filterEnable" type:"string" enum:"InputFilterEnable"`
12108
	// Use Filter strength (FilterStrength) to adjust the magnitude of the input
	// filter settings (Deblock and Denoise). The range is -5 to 5. Default is 0.
12111	FilterStrength *int64 `locationName:"filterStrength" type:"integer"`
12112
12113	// Enable the image inserter feature to include a graphic overlay on your video.
12114	// Enable or disable this feature for each input individually. This setting
12115	// is disabled by default.
12116	ImageInserter *ImageInserter `locationName:"imageInserter" type:"structure"`
12117
12118	// (InputClippings) contains sets of start and end times that together specify
12119	// a portion of the input to be used in the outputs. If you provide only a start
12120	// time, the clip will be the entire input from that point to the end. If you
12121	// provide only an end time, it will be the entire input up to that point. When
12122	// you specify more than one input clip, the transcoding service creates the
12123	// job outputs by stringing the clips together in the order you specify them.
12124	InputClippings []*InputClipping `locationName:"inputClippings" type:"list"`
12125
12126	// When you have a progressive segmented frame (PsF) input, use this setting
12127	// to flag the input as PsF. MediaConvert doesn't automatically detect PsF.
12128	// Therefore, flagging your input as PsF results in better preservation of video
12129	// quality when you do deinterlacing and frame rate conversion. If you don't
12130	// specify, the default value is Auto (AUTO). Auto is the correct setting for
12131	// all inputs that are not PsF. Don't set this value to PsF when your input
12132	// is interlaced. Doing so creates horizontal interlacing artifacts.
12133	InputScanType *string `locationName:"inputScanType" type:"string" enum:"InputScanType"`
12134
12135	// Use Selection placement (position) to define the video area in your output
12136	// frame. The area outside of the rectangle that you specify here is black.
12137	// If you specify a value here, it will override any value that you specify
12138	// in the output setting Selection placement (position). If you specify a value
12139	// here, this will override any AFD values in your input, even if you set Respond
12140	// to AFD (RespondToAfd) to Respond (RESPOND). If you specify a value here,
12141	// this will ignore anything that you specify for the setting Scaling Behavior
12142	// (scalingBehavior).
12143	Position *Rectangle `locationName:"position" type:"structure"`
12144
12145	// Use Program (programNumber) to select a specific program from within a multi-program
12146	// transport stream. Note that Quad 4K is not currently supported. Default is
12147	// the first program within the transport stream. If the program you specify
12148	// doesn't exist, the transcoding service will use this default.
12149	ProgramNumber *int64 `locationName:"programNumber" min:"1" type:"integer"`
12150
12151	// Set PSI control (InputPsiControl) for transport stream inputs to specify
	// which data the demux process scans. * Ignore PSI - Scan all PIDs for audio
12153	// and video. * Use PSI - Scan only PSI data.
12154	PsiControl *string `locationName:"psiControl" type:"string" enum:"InputPsiControl"`
12155
12156	// Use this Timecode source setting, located under the input settings (InputTimecodeSource),
12157	// to specify how the service counts input video frames. This input frame count
12158	// affects only the behavior of features that apply to a single input at a time,
12159	// such as input clipping and synchronizing some captions formats. Choose Embedded
12160	// (EMBEDDED) to use the timecodes in your input video. Choose Start at zero
12161	// (ZEROBASED) to start the first frame at zero. Choose Specified start (SPECIFIEDSTART)
12162	// to start the first frame at the timecode that you specify in the setting
12163	// Start timecode (timecodeStart). If you don't specify a value for Timecode
12164	// source, the service will use Embedded by default. For more information about
12165	// timecodes, see https://docs.aws.amazon.com/console/mediaconvert/timecode.
12166	TimecodeSource *string `locationName:"timecodeSource" type:"string" enum:"InputTimecodeSource"`
12167
12168	// Specify the timecode that you want the service to use for this input's initial
12169	// frame. To use this setting, you must set the Timecode source setting, located
12170	// under the input settings (InputTimecodeSource), to Specified start (SPECIFIEDSTART).
12171	// For more information about timecodes, see https://docs.aws.amazon.com/console/mediaconvert/timecode.
12172	TimecodeStart *string `locationName:"timecodeStart" min:"11" type:"string"`
12173
12174	// Selector for video.
12175	VideoSelector *VideoSelector `locationName:"videoSelector" type:"structure"`
12176}
12177
12178// String returns the string representation
12179func (s InputTemplate) String() string {
12180	return awsutil.Prettify(s)
12181}
12182
12183// GoString returns the string representation
12184func (s InputTemplate) GoString() string {
12185	return s.String()
12186}
12187
12188// Validate inspects the fields of the type to determine if they are valid.
12189func (s *InputTemplate) Validate() error {
12190	invalidParams := request.ErrInvalidParams{Context: "InputTemplate"}
12191	if s.FilterStrength != nil && *s.FilterStrength < -5 {
12192		invalidParams.Add(request.NewErrParamMinValue("FilterStrength", -5))
12193	}
12194	if s.ProgramNumber != nil && *s.ProgramNumber < 1 {
12195		invalidParams.Add(request.NewErrParamMinValue("ProgramNumber", 1))
12196	}
12197	if s.TimecodeStart != nil && len(*s.TimecodeStart) < 11 {
12198		invalidParams.Add(request.NewErrParamMinLen("TimecodeStart", 11))
12199	}
12200	if s.AudioSelectors != nil {
12201		for i, v := range s.AudioSelectors {
12202			if v == nil {
12203				continue
12204			}
12205			if err := v.Validate(); err != nil {
12206				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AudioSelectors", i), err.(request.ErrInvalidParams))
12207			}
12208		}
12209	}
12210	if s.CaptionSelectors != nil {
12211		for i, v := range s.CaptionSelectors {
12212			if v == nil {
12213				continue
12214			}
12215			if err := v.Validate(); err != nil {
12216				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionSelectors", i), err.(request.ErrInvalidParams))
12217			}
12218		}
12219	}
12220	if s.Crop != nil {
12221		if err := s.Crop.Validate(); err != nil {
12222			invalidParams.AddNested("Crop", err.(request.ErrInvalidParams))
12223		}
12224	}
12225	if s.ImageInserter != nil {
12226		if err := s.ImageInserter.Validate(); err != nil {
12227			invalidParams.AddNested("ImageInserter", err.(request.ErrInvalidParams))
12228		}
12229	}
12230	if s.Position != nil {
12231		if err := s.Position.Validate(); err != nil {
12232			invalidParams.AddNested("Position", err.(request.ErrInvalidParams))
12233		}
12234	}
12235	if s.VideoSelector != nil {
12236		if err := s.VideoSelector.Validate(); err != nil {
12237			invalidParams.AddNested("VideoSelector", err.(request.ErrInvalidParams))
12238		}
12239	}
12240
12241	if invalidParams.Len() > 0 {
12242		return invalidParams
12243	}
12244	return nil
12245}
12246
12247// SetAudioSelectorGroups sets the AudioSelectorGroups field's value.
12248func (s *InputTemplate) SetAudioSelectorGroups(v map[string]*AudioSelectorGroup) *InputTemplate {
12249	s.AudioSelectorGroups = v
12250	return s
12251}
12252
12253// SetAudioSelectors sets the AudioSelectors field's value.
12254func (s *InputTemplate) SetAudioSelectors(v map[string]*AudioSelector) *InputTemplate {
12255	s.AudioSelectors = v
12256	return s
12257}
12258
12259// SetCaptionSelectors sets the CaptionSelectors field's value.
12260func (s *InputTemplate) SetCaptionSelectors(v map[string]*CaptionSelector) *InputTemplate {
12261	s.CaptionSelectors = v
12262	return s
12263}
12264
12265// SetCrop sets the Crop field's value.
12266func (s *InputTemplate) SetCrop(v *Rectangle) *InputTemplate {
12267	s.Crop = v
12268	return s
12269}
12270
12271// SetDeblockFilter sets the DeblockFilter field's value.
12272func (s *InputTemplate) SetDeblockFilter(v string) *InputTemplate {
12273	s.DeblockFilter = &v
12274	return s
12275}
12276
12277// SetDenoiseFilter sets the DenoiseFilter field's value.
12278func (s *InputTemplate) SetDenoiseFilter(v string) *InputTemplate {
12279	s.DenoiseFilter = &v
12280	return s
12281}
12282
12283// SetFilterEnable sets the FilterEnable field's value.
12284func (s *InputTemplate) SetFilterEnable(v string) *InputTemplate {
12285	s.FilterEnable = &v
12286	return s
12287}
12288
12289// SetFilterStrength sets the FilterStrength field's value.
12290func (s *InputTemplate) SetFilterStrength(v int64) *InputTemplate {
12291	s.FilterStrength = &v
12292	return s
12293}
12294
12295// SetImageInserter sets the ImageInserter field's value.
12296func (s *InputTemplate) SetImageInserter(v *ImageInserter) *InputTemplate {
12297	s.ImageInserter = v
12298	return s
12299}
12300
12301// SetInputClippings sets the InputClippings field's value.
12302func (s *InputTemplate) SetInputClippings(v []*InputClipping) *InputTemplate {
12303	s.InputClippings = v
12304	return s
12305}
12306
12307// SetInputScanType sets the InputScanType field's value.
12308func (s *InputTemplate) SetInputScanType(v string) *InputTemplate {
12309	s.InputScanType = &v
12310	return s
12311}
12312
12313// SetPosition sets the Position field's value.
12314func (s *InputTemplate) SetPosition(v *Rectangle) *InputTemplate {
12315	s.Position = v
12316	return s
12317}
12318
12319// SetProgramNumber sets the ProgramNumber field's value.
12320func (s *InputTemplate) SetProgramNumber(v int64) *InputTemplate {
12321	s.ProgramNumber = &v
12322	return s
12323}
12324
12325// SetPsiControl sets the PsiControl field's value.
12326func (s *InputTemplate) SetPsiControl(v string) *InputTemplate {
12327	s.PsiControl = &v
12328	return s
12329}
12330
12331// SetTimecodeSource sets the TimecodeSource field's value.
12332func (s *InputTemplate) SetTimecodeSource(v string) *InputTemplate {
12333	s.TimecodeSource = &v
12334	return s
12335}
12336
12337// SetTimecodeStart sets the TimecodeStart field's value.
12338func (s *InputTemplate) SetTimecodeStart(v string) *InputTemplate {
12339	s.TimecodeStart = &v
12340	return s
12341}
12342
12343// SetVideoSelector sets the VideoSelector field's value.
12344func (s *InputTemplate) SetVideoSelector(v *VideoSelector) *InputTemplate {
12345	s.VideoSelector = v
12346	return s
12347}
12348
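// Illustrative sketch (not part of the generated API): an InputTemplate that
// forces the denoise filter described above. The enum strings are assumptions
// based on the InputDenoiseFilter and InputFilterEnable enums; verify them
// against the enum constants in this package before use.
//
//    tmpl := (&InputTemplate{}).
//        SetDenoiseFilter("ENABLED").
//        SetFilterEnable("FORCE").
//        SetFilterStrength(2)
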
12349// Settings that specify how your still graphic overlay appears.
12350type InsertableImage struct {
12351	_ struct{} `type:"structure"`
12352
12353	// Specify the time, in milliseconds, for the image to remain on the output
12354	// video. This duration includes fade-in time but not fade-out time.
12355	Duration *int64 `locationName:"duration" type:"integer"`
12356
12357	// Specify the length of time, in milliseconds, between the Start time that
12358	// you specify for the image insertion and the time that the image appears at
12359	// full opacity. Full opacity is the level that you specify for the opacity
12360	// setting. If you don't specify a value for Fade-in, the image will appear
12361	// abruptly at the overlay start time.
12362	FadeIn *int64 `locationName:"fadeIn" type:"integer"`
12363
12364	// Specify the length of time, in milliseconds, between the end of the time
12365	// that you have specified for the image overlay Duration and when the overlaid
12366	// image has faded to total transparency. If you don't specify a value for Fade-out,
12367	// the image will disappear abruptly at the end of the inserted image duration.
12368	FadeOut *int64 `locationName:"fadeOut" type:"integer"`
12369
12370	// Specify the height of the inserted image in pixels. If you specify a value
12371	// that's larger than the video resolution height, the service will crop your
12372	// overlaid image to fit. To use the native height of the image, keep this setting
12373	// blank.
12374	Height *int64 `locationName:"height" type:"integer"`
12375
12376	// Specify the HTTP, HTTPS, or Amazon S3 location of the image that you want
12377	// to overlay on the video. Use a PNG or TGA file.
12378	ImageInserterInput *string `locationName:"imageInserterInput" min:"14" type:"string"`
12379
12380	// Specify the distance, in pixels, between the inserted image and the left
12381	// edge of the video frame. Required for any image overlay that you specify.
12382	ImageX *int64 `locationName:"imageX" type:"integer"`
12383
12384	// Specify the distance, in pixels, between the overlaid image and the top edge
12385	// of the video frame. Required for any image overlay that you specify.
12386	ImageY *int64 `locationName:"imageY" type:"integer"`
12387
12388	// Specify how overlapping inserted images appear. Images with higher values
12389	// for Layer appear on top of images with lower values for Layer.
12390	Layer *int64 `locationName:"layer" type:"integer"`
12391
12392	// Use Opacity (Opacity) to specify how much of the underlying video shows through
12393	// the inserted image. 0 is transparent and 100 is fully opaque. Default is
12394	// 50.
12395	Opacity *int64 `locationName:"opacity" type:"integer"`
12396
12397	// Specify the timecode of the frame that you want the overlay to first appear
12398	// on. This must be in timecode (HH:MM:SS:FF or HH:MM:SS;FF) format. Remember
12399	// to take into account your timecode source settings.
12400	StartTime *string `locationName:"startTime" type:"string"`
12401
12402	// Specify the width of the inserted image in pixels. If you specify a value
12403	// that's larger than the video resolution width, the service will crop your
12404	// overlaid image to fit. To use the native width of the image, keep this setting
12405	// blank.
12406	Width *int64 `locationName:"width" type:"integer"`
12407}
12408
12409// String returns the string representation
12410func (s InsertableImage) String() string {
12411	return awsutil.Prettify(s)
12412}
12413
12414// GoString returns the string representation
12415func (s InsertableImage) GoString() string {
12416	return s.String()
12417}
12418
12419// Validate inspects the fields of the type to determine if they are valid.
12420func (s *InsertableImage) Validate() error {
12421	invalidParams := request.ErrInvalidParams{Context: "InsertableImage"}
12422	if s.ImageInserterInput != nil && len(*s.ImageInserterInput) < 14 {
12423		invalidParams.Add(request.NewErrParamMinLen("ImageInserterInput", 14))
12424	}
12425
12426	if invalidParams.Len() > 0 {
12427		return invalidParams
12428	}
12429	return nil
12430}
12431
12432// SetDuration sets the Duration field's value.
12433func (s *InsertableImage) SetDuration(v int64) *InsertableImage {
12434	s.Duration = &v
12435	return s
12436}
12437
12438// SetFadeIn sets the FadeIn field's value.
12439func (s *InsertableImage) SetFadeIn(v int64) *InsertableImage {
12440	s.FadeIn = &v
12441	return s
12442}
12443
12444// SetFadeOut sets the FadeOut field's value.
12445func (s *InsertableImage) SetFadeOut(v int64) *InsertableImage {
12446	s.FadeOut = &v
12447	return s
12448}
12449
12450// SetHeight sets the Height field's value.
12451func (s *InsertableImage) SetHeight(v int64) *InsertableImage {
12452	s.Height = &v
12453	return s
12454}
12455
12456// SetImageInserterInput sets the ImageInserterInput field's value.
12457func (s *InsertableImage) SetImageInserterInput(v string) *InsertableImage {
12458	s.ImageInserterInput = &v
12459	return s
12460}
12461
12462// SetImageX sets the ImageX field's value.
12463func (s *InsertableImage) SetImageX(v int64) *InsertableImage {
12464	s.ImageX = &v
12465	return s
12466}
12467
12468// SetImageY sets the ImageY field's value.
12469func (s *InsertableImage) SetImageY(v int64) *InsertableImage {
12470	s.ImageY = &v
12471	return s
12472}
12473
12474// SetLayer sets the Layer field's value.
12475func (s *InsertableImage) SetLayer(v int64) *InsertableImage {
12476	s.Layer = &v
12477	return s
12478}
12479
12480// SetOpacity sets the Opacity field's value.
12481func (s *InsertableImage) SetOpacity(v int64) *InsertableImage {
12482	s.Opacity = &v
12483	return s
12484}
12485
12486// SetStartTime sets the StartTime field's value.
12487func (s *InsertableImage) SetStartTime(v string) *InsertableImage {
12488	s.StartTime = &v
12489	return s
12490}
12491
12492// SetWidth sets the Width field's value.
12493func (s *InsertableImage) SetWidth(v int64) *InsertableImage {
12494	s.Width = &v
12495	return s
12496}
12497
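// Illustrative sketch (not part of the generated API): a still overlay that
// appears five seconds in, fades in and out over half a second, and stays up
// for ten seconds. The S3 location and coordinates are hypothetical placeholders.
//
//    img := (&InsertableImage{}).
//        SetImageInserterInput("s3://DOC-EXAMPLE-BUCKET/overlays/logo.png").
//        SetImageX(50).
//        SetImageY(50).
//        SetLayer(1).
//        SetOpacity(50).
//        SetStartTime("00:00:05:00").
//        SetFadeIn(500).
//        SetDuration(10000).
//        SetFadeOut(500)
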
12498type InternalServerErrorException struct {
12499	_            struct{}                  `type:"structure"`
12500	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
12501
12502	Message_ *string `locationName:"message" type:"string"`
12503}
12504
12505// String returns the string representation
12506func (s InternalServerErrorException) String() string {
12507	return awsutil.Prettify(s)
12508}
12509
12510// GoString returns the string representation
12511func (s InternalServerErrorException) GoString() string {
12512	return s.String()
12513}
12514
12515func newErrorInternalServerErrorException(v protocol.ResponseMetadata) error {
12516	return &InternalServerErrorException{
12517		RespMetadata: v,
12518	}
12519}
12520
12521// Code returns the exception type name.
12522func (s *InternalServerErrorException) Code() string {
12523	return "InternalServerErrorException"
12524}
12525
12526// Message returns the exception's message.
12527func (s *InternalServerErrorException) Message() string {
12528	if s.Message_ != nil {
12529		return *s.Message_
12530	}
12531	return ""
12532}
12533
12534// OrigErr always returns nil, satisfies awserr.Error interface.
12535func (s *InternalServerErrorException) OrigErr() error {
12536	return nil
12537}
12538
12539func (s *InternalServerErrorException) Error() string {
12540	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
12541}
12542
// StatusCode returns the HTTP status code for the request's response error.
12544func (s *InternalServerErrorException) StatusCode() int {
12545	return s.RespMetadata.StatusCode
12546}
12547
// RequestID returns the service's response RequestID for the request.
12549func (s *InternalServerErrorException) RequestID() string {
12550	return s.RespMetadata.RequestID
12551}
12552
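// Illustrative sketch (not part of the generated API): callers can detect this
// error with a type assertion on the error returned by any operation. The svc
// client and CreateJob input are assumed to exist in the calling code; from
// outside this package, qualify the type as mediaconvert.InternalServerErrorException.
//
//    _, err := svc.CreateJob(input)
//    if err != nil {
//        if e, ok := err.(*InternalServerErrorException); ok {
//            fmt.Println(e.Code(), e.Message(), e.StatusCode())
//        }
//    }
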
12553// Each job converts an input file into an output file or files. For more information,
12554// see the User Guide at https://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
12555type Job struct {
12556	_ struct{} `type:"structure"`
12557
12558	// Accelerated transcoding can significantly speed up jobs with long, visually
12559	// complex content.
12560	AccelerationSettings *AccelerationSettings `locationName:"accelerationSettings" type:"structure"`
12561
12562	// Describes whether the current job is running with accelerated transcoding.
12563	// For jobs that have Acceleration (AccelerationMode) set to DISABLED, AccelerationStatus
12564	// is always NOT_APPLICABLE. For jobs that have Acceleration (AccelerationMode)
12565	// set to ENABLED or PREFERRED, AccelerationStatus is one of the other states.
12566	// AccelerationStatus is IN_PROGRESS initially, while the service determines
12567	// whether the input files and job settings are compatible with accelerated
	// transcoding. If they are, AccelerationStatus is ACCELERATED. If your input
12569	// files and job settings aren't compatible with accelerated transcoding, the
12570	// service either fails your job or runs it without accelerated transcoding,
12571	// depending on how you set Acceleration (AccelerationMode). When the service
12572	// runs your job without accelerated transcoding, AccelerationStatus is NOT_ACCELERATED.
12573	AccelerationStatus *string `locationName:"accelerationStatus" type:"string" enum:"AccelerationStatus"`
12574
12575	// An identifier for this resource that is unique within all of AWS.
12576	Arn *string `locationName:"arn" type:"string"`
12577
12578	// The tag type that AWS Billing and Cost Management will use to sort your AWS
12579	// Elemental MediaConvert costs on any billing report that you set up.
12580	BillingTagsSource *string `locationName:"billingTagsSource" type:"string" enum:"BillingTagsSource"`
12581
	// The time, in Unix epoch format in seconds, when the job was created.
12583	CreatedAt *time.Time `locationName:"createdAt" type:"timestamp" timestampFormat:"unixTimestamp"`
12584
	// A job's phase can be PROBING, TRANSCODING, or UPLOADING.
12586	CurrentPhase *string `locationName:"currentPhase" type:"string" enum:"JobPhase"`
12587
12588	// Error code for the job
12589	ErrorCode *int64 `locationName:"errorCode" type:"integer"`
12590
	// Error message for the job.
12592	ErrorMessage *string `locationName:"errorMessage" type:"string"`
12593
12594	// Optional list of hop destinations.
12595	HopDestinations []*HopDestination `locationName:"hopDestinations" type:"list"`
12596
12597	// A portion of the job's ARN, unique within your AWS Elemental MediaConvert
12598	// resources
12599	Id *string `locationName:"id" type:"string"`
12600
12601	// An estimate of how far your job has progressed. This estimate is shown as
12602	// a percentage of the total time from when your job leaves its queue to when
12603	// your output files appear in your output Amazon S3 bucket. AWS Elemental MediaConvert
12604	// provides jobPercentComplete in CloudWatch STATUS_UPDATE events and in the
12605	// response to GetJob and ListJobs requests. The jobPercentComplete estimate
12606	// is reliable for the following input containers: Quicktime, Transport Stream,
12607	// MP4, and MXF. For some jobs, the service can't provide information about
12608	// job progress. In those cases, jobPercentComplete returns a null value.
12609	JobPercentComplete *int64 `locationName:"jobPercentComplete" type:"integer"`
12610
12611	// The job template that the job is created from, if it is created from a job
12612	// template.
12613	JobTemplate *string `locationName:"jobTemplate" type:"string"`
12614
12615	// Provides messages from the service about jobs that you have already successfully
12616	// submitted.
12617	Messages *JobMessages `locationName:"messages" type:"structure"`
12618
12619	// List of output group details
12620	OutputGroupDetails []*OutputGroupDetail `locationName:"outputGroupDetails" type:"list"`
12621
12622	// Relative priority on the job.
12623	Priority *int64 `locationName:"priority" type:"integer"`
12624
12625	// When you create a job, you can specify a queue to send it to. If you don't
12626	// specify, the job will go to the default queue. For more about queues, see
12627	// the User Guide topic at https://docs.aws.amazon.com/mediaconvert/latest/ug/what-is.html
12628	Queue *string `locationName:"queue" type:"string"`
12629
12630	// The job's queue hopping history.
12631	QueueTransitions []*QueueTransition `locationName:"queueTransitions" type:"list"`
12632
12633	// The number of times that the service automatically attempted to process your
12634	// job after encountering an error.
12635	RetryCount *int64 `locationName:"retryCount" type:"integer"`
12636
12637	// The IAM role you use for creating this job. For details about permissions,
	// see the User Guide topic at https://docs.aws.amazon.com/mediaconvert/latest/ug/iam-role.html
12639	//
12640	// Role is a required field
12641	Role *string `locationName:"role" type:"string" required:"true"`
12642
12643	// JobSettings contains all the transcode settings for a job.
12644	//
12645	// Settings is a required field
12646	Settings *JobSettings `locationName:"settings" type:"structure" required:"true"`
12647
12648	// Enable this setting when you run a test job to estimate how many reserved
12649	// transcoding slots (RTS) you need. When this is enabled, MediaConvert runs
12650	// your job from an on-demand queue with similar performance to what you will
12651	// see with one RTS in a reserved queue. This setting is disabled by default.
12652	SimulateReservedQueue *string `locationName:"simulateReservedQueue" type:"string" enum:"SimulateReservedQueue"`
12653
12654	// A job's status can be SUBMITTED, PROGRESSING, COMPLETE, CANCELED, or ERROR.
12655	Status *string `locationName:"status" type:"string" enum:"JobStatus"`
12656
12657	// Specify how often MediaConvert sends STATUS_UPDATE events to Amazon CloudWatch
12658	// Events. Set the interval, in seconds, between status updates. MediaConvert
12659	// sends an update at this interval from the time the service begins processing
12660	// your job to the time it completes the transcode or encounters an error.
12661	StatusUpdateInterval *string `locationName:"statusUpdateInterval" type:"string" enum:"StatusUpdateInterval"`
12662
12663	// Information about when jobs are submitted, started, and finished is specified
12664	// in Unix epoch format in seconds.
12665	Timing *Timing `locationName:"timing" type:"structure"`
12666
	// User-defined metadata that you want to associate with a MediaConvert job.
12668	// You specify metadata in key/value pairs.
12669	UserMetadata map[string]*string `locationName:"userMetadata" type:"map"`
12670}
12671
12672// String returns the string representation
12673func (s Job) String() string {
12674	return awsutil.Prettify(s)
12675}
12676
12677// GoString returns the string representation
12678func (s Job) GoString() string {
12679	return s.String()
12680}
12681
12682// SetAccelerationSettings sets the AccelerationSettings field's value.
12683func (s *Job) SetAccelerationSettings(v *AccelerationSettings) *Job {
12684	s.AccelerationSettings = v
12685	return s
12686}
12687
12688// SetAccelerationStatus sets the AccelerationStatus field's value.
12689func (s *Job) SetAccelerationStatus(v string) *Job {
12690	s.AccelerationStatus = &v
12691	return s
12692}
12693
12694// SetArn sets the Arn field's value.
12695func (s *Job) SetArn(v string) *Job {
12696	s.Arn = &v
12697	return s
12698}
12699
12700// SetBillingTagsSource sets the BillingTagsSource field's value.
12701func (s *Job) SetBillingTagsSource(v string) *Job {
12702	s.BillingTagsSource = &v
12703	return s
12704}
12705
12706// SetCreatedAt sets the CreatedAt field's value.
12707func (s *Job) SetCreatedAt(v time.Time) *Job {
12708	s.CreatedAt = &v
12709	return s
12710}
12711
12712// SetCurrentPhase sets the CurrentPhase field's value.
12713func (s *Job) SetCurrentPhase(v string) *Job {
12714	s.CurrentPhase = &v
12715	return s
12716}
12717
12718// SetErrorCode sets the ErrorCode field's value.
12719func (s *Job) SetErrorCode(v int64) *Job {
12720	s.ErrorCode = &v
12721	return s
12722}
12723
12724// SetErrorMessage sets the ErrorMessage field's value.
12725func (s *Job) SetErrorMessage(v string) *Job {
12726	s.ErrorMessage = &v
12727	return s
12728}
12729
12730// SetHopDestinations sets the HopDestinations field's value.
12731func (s *Job) SetHopDestinations(v []*HopDestination) *Job {
12732	s.HopDestinations = v
12733	return s
12734}
12735
12736// SetId sets the Id field's value.
12737func (s *Job) SetId(v string) *Job {
12738	s.Id = &v
12739	return s
12740}
12741
12742// SetJobPercentComplete sets the JobPercentComplete field's value.
12743func (s *Job) SetJobPercentComplete(v int64) *Job {
12744	s.JobPercentComplete = &v
12745	return s
12746}
12747
12748// SetJobTemplate sets the JobTemplate field's value.
12749func (s *Job) SetJobTemplate(v string) *Job {
12750	s.JobTemplate = &v
12751	return s
12752}
12753
12754// SetMessages sets the Messages field's value.
12755func (s *Job) SetMessages(v *JobMessages) *Job {
12756	s.Messages = v
12757	return s
12758}
12759
12760// SetOutputGroupDetails sets the OutputGroupDetails field's value.
12761func (s *Job) SetOutputGroupDetails(v []*OutputGroupDetail) *Job {
12762	s.OutputGroupDetails = v
12763	return s
12764}
12765
12766// SetPriority sets the Priority field's value.
12767func (s *Job) SetPriority(v int64) *Job {
12768	s.Priority = &v
12769	return s
12770}
12771
12772// SetQueue sets the Queue field's value.
12773func (s *Job) SetQueue(v string) *Job {
12774	s.Queue = &v
12775	return s
12776}
12777
12778// SetQueueTransitions sets the QueueTransitions field's value.
12779func (s *Job) SetQueueTransitions(v []*QueueTransition) *Job {
12780	s.QueueTransitions = v
12781	return s
12782}
12783
12784// SetRetryCount sets the RetryCount field's value.
12785func (s *Job) SetRetryCount(v int64) *Job {
12786	s.RetryCount = &v
12787	return s
12788}
12789
12790// SetRole sets the Role field's value.
12791func (s *Job) SetRole(v string) *Job {
12792	s.Role = &v
12793	return s
12794}
12795
12796// SetSettings sets the Settings field's value.
12797func (s *Job) SetSettings(v *JobSettings) *Job {
12798	s.Settings = v
12799	return s
12800}
12801
12802// SetSimulateReservedQueue sets the SimulateReservedQueue field's value.
12803func (s *Job) SetSimulateReservedQueue(v string) *Job {
12804	s.SimulateReservedQueue = &v
12805	return s
12806}
12807
12808// SetStatus sets the Status field's value.
12809func (s *Job) SetStatus(v string) *Job {
12810	s.Status = &v
12811	return s
12812}
12813
12814// SetStatusUpdateInterval sets the StatusUpdateInterval field's value.
12815func (s *Job) SetStatusUpdateInterval(v string) *Job {
12816	s.StatusUpdateInterval = &v
12817	return s
12818}
12819
12820// SetTiming sets the Timing field's value.
12821func (s *Job) SetTiming(v *Timing) *Job {
12822	s.Timing = v
12823	return s
12824}
12825
12826// SetUserMetadata sets the UserMetadata field's value.
12827func (s *Job) SetUserMetadata(v map[string]*string) *Job {
12828	s.UserMetadata = v
12829	return s
12830}
12831
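// Illustrative sketch (not part of the generated API): polling a job and reading
// its status and progress from the Job fields above. The job ID is a hypothetical
// placeholder, and JobPercentComplete can be nil for some input containers.
//
//    resp, err := svc.GetJob(&GetJobInput{Id: aws.String("1234567890123-abc123")})
//    if err == nil && resp.Job != nil {
//        fmt.Println(aws.StringValue(resp.Job.Status))
//        fmt.Println(aws.Int64Value(resp.Job.JobPercentComplete))
//    }
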
12832// Provides messages from the service about jobs that you have already successfully
12833// submitted.
12834type JobMessages struct {
12835	_ struct{} `type:"structure"`
12836
12837	// List of messages that are informational only and don't indicate a problem
12838	// with your job.
12839	Info []*string `locationName:"info" type:"list"`
12840
12841	// List of messages that warn about conditions that might cause your job not
12842	// to run or to fail.
12843	Warning []*string `locationName:"warning" type:"list"`
12844}
12845
12846// String returns the string representation
12847func (s JobMessages) String() string {
12848	return awsutil.Prettify(s)
12849}
12850
12851// GoString returns the string representation
12852func (s JobMessages) GoString() string {
12853	return s.String()
12854}
12855
12856// SetInfo sets the Info field's value.
12857func (s *JobMessages) SetInfo(v []*string) *JobMessages {
12858	s.Info = v
12859	return s
12860}
12861
12862// SetWarning sets the Warning field's value.
12863func (s *JobMessages) SetWarning(v []*string) *JobMessages {
12864	s.Warning = v
12865	return s
12866}
12867
12868// JobSettings contains all the transcode settings for a job.
12869type JobSettings struct {
12870	_ struct{} `type:"structure"`
12871
12872	// When specified, this offset (in milliseconds) is added to the input Ad Avail
12873	// PTS time.
12874	AdAvailOffset *int64 `locationName:"adAvailOffset" type:"integer"`
12875
12876	// Settings for ad avail blanking. Video can be blanked or overlaid with an
12877	// image, and audio muted during SCTE-35 triggered ad avails.
12878	AvailBlanking *AvailBlanking `locationName:"availBlanking" type:"structure"`
12879
12880	// Settings for Event Signaling And Messaging (ESAM).
12881	Esam *EsamSettings `locationName:"esam" type:"structure"`
12882
	// Use Inputs (inputs) to define the source files used in the transcode job. There
	// can be multiple inputs in a job. These inputs are concatenated together
	// to create the output.
12886	Inputs []*Input `locationName:"inputs" type:"list"`
12887
12888	// Overlay motion graphics on top of your video. The motion graphics that you
12889	// specify here appear on all outputs in all output groups.
12890	MotionImageInserter *MotionImageInserter `locationName:"motionImageInserter" type:"structure"`
12891
12892	// Settings for your Nielsen configuration. If you don't do Nielsen measurement
12893	// and analytics, ignore these settings. When you enable Nielsen configuration
12894	// (nielsenConfiguration), MediaConvert enables PCM to ID3 tagging for all outputs
12895	// in the job. To enable Nielsen configuration programmatically, include an
12896	// instance of nielsenConfiguration in your JSON job specification. Even if
12897	// you don't include any children of nielsenConfiguration, you still enable
12898	// the setting.
12899	NielsenConfiguration *NielsenConfiguration `locationName:"nielsenConfiguration" type:"structure"`
12900
12901	// Ignore these settings unless you are using Nielsen non-linear watermarking.
12902	// Specify the values that MediaConvert uses to generate and place Nielsen watermarks
12903	// in your output audio. In addition to specifying these values, you also need
12904	// to set up your cloud TIC server. These settings apply to every output in
	// your job. The MediaConvert implementation is currently compatible with the
	// following Nielsen versions: Nielsen Watermark SDK Version 5.2.1, Nielsen NLM
	// Watermark Engine Version 1.2.7, and Nielsen Watermark Authenticator [SID_TIC]
	// Version [5.0.0].
12908	NielsenNonLinearWatermark *NielsenNonLinearWatermarkSettings `locationName:"nielsenNonLinearWatermark" type:"structure"`
12909
12910	// (OutputGroups) contains one group of settings for each set of outputs that
12911	// share a common package type. All unpackaged files (MPEG-4, MPEG-2 TS, Quicktime,
12912	// MXF, and no container) are grouped in a single output group as well. Required
12913	// in (OutputGroups) is a group of settings that apply to the whole group. This
12914	// required object depends on the value you set for (Type) under (OutputGroups)>(OutputGroupSettings).
12915	// Type, settings object pairs are as follows. * FILE_GROUP_SETTINGS, FileGroupSettings
12916	// * HLS_GROUP_SETTINGS, HlsGroupSettings * DASH_ISO_GROUP_SETTINGS, DashIsoGroupSettings
12917	// * MS_SMOOTH_GROUP_SETTINGS, MsSmoothGroupSettings * CMAF_GROUP_SETTINGS,
12918	// CmafGroupSettings
12919	OutputGroups []*OutputGroup `locationName:"outputGroups" type:"list"`
12920
12921	// Contains settings used to acquire and adjust timecode information from inputs.
12922	TimecodeConfig *TimecodeConfig `locationName:"timecodeConfig" type:"structure"`
12923
12924	// Enable Timed metadata insertion (TimedMetadataInsertion) to include ID3 tags
12925	// in any HLS outputs. To include timed metadata, you must enable it here, enable
12926	// it in each output container, and specify tags and timecodes in ID3 insertion
12927	// (Id3Insertion) objects.
12928	TimedMetadataInsertion *TimedMetadataInsertion `locationName:"timedMetadataInsertion" type:"structure"`
12929}
12930
12931// String returns the string representation
12932func (s JobSettings) String() string {
12933	return awsutil.Prettify(s)
12934}
12935
12936// GoString returns the string representation
12937func (s JobSettings) GoString() string {
12938	return s.String()
12939}
12940
12941// Validate inspects the fields of the type to determine if they are valid.
12942func (s *JobSettings) Validate() error {
12943	invalidParams := request.ErrInvalidParams{Context: "JobSettings"}
12944	if s.AdAvailOffset != nil && *s.AdAvailOffset < -1000 {
12945		invalidParams.Add(request.NewErrParamMinValue("AdAvailOffset", -1000))
12946	}
12947	if s.AvailBlanking != nil {
12948		if err := s.AvailBlanking.Validate(); err != nil {
12949			invalidParams.AddNested("AvailBlanking", err.(request.ErrInvalidParams))
12950		}
12951	}
12952	if s.Inputs != nil {
12953		for i, v := range s.Inputs {
12954			if v == nil {
12955				continue
12956			}
12957			if err := v.Validate(); err != nil {
12958				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Inputs", i), err.(request.ErrInvalidParams))
12959			}
12960		}
12961	}
12962	if s.MotionImageInserter != nil {
12963		if err := s.MotionImageInserter.Validate(); err != nil {
12964			invalidParams.AddNested("MotionImageInserter", err.(request.ErrInvalidParams))
12965		}
12966	}
12967	if s.NielsenNonLinearWatermark != nil {
12968		if err := s.NielsenNonLinearWatermark.Validate(); err != nil {
12969			invalidParams.AddNested("NielsenNonLinearWatermark", err.(request.ErrInvalidParams))
12970		}
12971	}
12972	if s.OutputGroups != nil {
12973		for i, v := range s.OutputGroups {
12974			if v == nil {
12975				continue
12976			}
12977			if err := v.Validate(); err != nil {
12978				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "OutputGroups", i), err.(request.ErrInvalidParams))
12979			}
12980		}
12981	}
12982
12983	if invalidParams.Len() > 0 {
12984		return invalidParams
12985	}
12986	return nil
12987}
12988
12989// SetAdAvailOffset sets the AdAvailOffset field's value.
12990func (s *JobSettings) SetAdAvailOffset(v int64) *JobSettings {
12991	s.AdAvailOffset = &v
12992	return s
12993}
12994
12995// SetAvailBlanking sets the AvailBlanking field's value.
12996func (s *JobSettings) SetAvailBlanking(v *AvailBlanking) *JobSettings {
12997	s.AvailBlanking = v
12998	return s
12999}
13000
13001// SetEsam sets the Esam field's value.
13002func (s *JobSettings) SetEsam(v *EsamSettings) *JobSettings {
13003	s.Esam = v
13004	return s
13005}
13006
13007// SetInputs sets the Inputs field's value.
13008func (s *JobSettings) SetInputs(v []*Input) *JobSettings {
13009	s.Inputs = v
13010	return s
13011}
13012
13013// SetMotionImageInserter sets the MotionImageInserter field's value.
13014func (s *JobSettings) SetMotionImageInserter(v *MotionImageInserter) *JobSettings {
13015	s.MotionImageInserter = v
13016	return s
13017}
13018
13019// SetNielsenConfiguration sets the NielsenConfiguration field's value.
13020func (s *JobSettings) SetNielsenConfiguration(v *NielsenConfiguration) *JobSettings {
13021	s.NielsenConfiguration = v
13022	return s
13023}
13024
13025// SetNielsenNonLinearWatermark sets the NielsenNonLinearWatermark field's value.
13026func (s *JobSettings) SetNielsenNonLinearWatermark(v *NielsenNonLinearWatermarkSettings) *JobSettings {
13027	s.NielsenNonLinearWatermark = v
13028	return s
13029}
13030
13031// SetOutputGroups sets the OutputGroups field's value.
13032func (s *JobSettings) SetOutputGroups(v []*OutputGroup) *JobSettings {
13033	s.OutputGroups = v
13034	return s
13035}
13036
13037// SetTimecodeConfig sets the TimecodeConfig field's value.
13038func (s *JobSettings) SetTimecodeConfig(v *TimecodeConfig) *JobSettings {
13039	s.TimecodeConfig = v
13040	return s
13041}
13042
13043// SetTimedMetadataInsertion sets the TimedMetadataInsertion field's value.
13044func (s *JobSettings) SetTimedMetadataInsertion(v *TimedMetadataInsertion) *JobSettings {
13045	s.TimedMetadataInsertion = v
13046	return s
13047}
13048
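// Illustrative sketch (not part of the generated API): the smallest useful
// JobSettings wires one file input to the job. A runnable job also needs at
// least one output group, which is omitted here for brevity; the S3 path is a
// hypothetical placeholder.
//
//    settings := (&JobSettings{}).SetInputs([]*Input{
//        (&Input{}).SetFileInput("s3://DOC-EXAMPLE-BUCKET/sources/input.mp4"),
//    })
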
13049// A job template is a pre-made set of encoding instructions that you can use
13050// to quickly create a job.
13051type JobTemplate struct {
13052	_ struct{} `type:"structure"`
13053
13054	// Accelerated transcoding can significantly speed up jobs with long, visually
13055	// complex content.
13056	AccelerationSettings *AccelerationSettings `locationName:"accelerationSettings" type:"structure"`
13057
13058	// An identifier for this resource that is unique within all of AWS.
13059	Arn *string `locationName:"arn" type:"string"`
13060
13061	// An optional category you create to organize your job templates.
13062	Category *string `locationName:"category" type:"string"`
13063
13064	// The timestamp in epoch seconds for Job template creation.
13065	CreatedAt *time.Time `locationName:"createdAt" type:"timestamp" timestampFormat:"unixTimestamp"`
13066
13067	// An optional description you create for each job template.
13068	Description *string `locationName:"description" type:"string"`
13069
13070	// Optional list of hop destinations.
13071	HopDestinations []*HopDestination `locationName:"hopDestinations" type:"list"`
13072
13073	// The timestamp in epoch seconds when the Job template was last updated.
13074	LastUpdated *time.Time `locationName:"lastUpdated" type:"timestamp" timestampFormat:"unixTimestamp"`
13075
13076	// A name you create for each job template. Each name must be unique within
13077	// your account.
13078	//
13079	// Name is a required field
13080	Name *string `locationName:"name" type:"string" required:"true"`
13081
13082	// Relative priority on the job.
13083	Priority *int64 `locationName:"priority" type:"integer"`
13084
13085	// Optional. The queue that jobs created from this template are assigned to.
13086	// If you don't specify this, jobs will go to the default queue.
13087	Queue *string `locationName:"queue" type:"string"`
13088
13089	// JobTemplateSettings contains all the transcode settings saved in the template
13090	// that will be applied to jobs created from it.
13091	//
13092	// Settings is a required field
13093	Settings *JobTemplateSettings `locationName:"settings" type:"structure" required:"true"`
13094
13095	// Specify how often MediaConvert sends STATUS_UPDATE events to Amazon CloudWatch
13096	// Events. Set the interval, in seconds, between status updates. MediaConvert
13097	// sends an update at this interval from the time the service begins processing
13098	// your job to the time it completes the transcode or encounters an error.
13099	StatusUpdateInterval *string `locationName:"statusUpdateInterval" type:"string" enum:"StatusUpdateInterval"`
13100
13101	// A job template can be of two types: system or custom. System or built-in
13102	// job templates can't be modified or deleted by the user.
13103	Type *string `locationName:"type" type:"string" enum:"Type"`
13104}
13105
13106// String returns the string representation
13107func (s JobTemplate) String() string {
13108	return awsutil.Prettify(s)
13109}
13110
13111// GoString returns the string representation
13112func (s JobTemplate) GoString() string {
13113	return s.String()
13114}
13115
13116// SetAccelerationSettings sets the AccelerationSettings field's value.
13117func (s *JobTemplate) SetAccelerationSettings(v *AccelerationSettings) *JobTemplate {
13118	s.AccelerationSettings = v
13119	return s
13120}
13121
13122// SetArn sets the Arn field's value.
13123func (s *JobTemplate) SetArn(v string) *JobTemplate {
13124	s.Arn = &v
13125	return s
13126}
13127
13128// SetCategory sets the Category field's value.
13129func (s *JobTemplate) SetCategory(v string) *JobTemplate {
13130	s.Category = &v
13131	return s
13132}
13133
13134// SetCreatedAt sets the CreatedAt field's value.
13135func (s *JobTemplate) SetCreatedAt(v time.Time) *JobTemplate {
13136	s.CreatedAt = &v
13137	return s
13138}
13139
13140// SetDescription sets the Description field's value.
13141func (s *JobTemplate) SetDescription(v string) *JobTemplate {
13142	s.Description = &v
13143	return s
13144}
13145
13146// SetHopDestinations sets the HopDestinations field's value.
13147func (s *JobTemplate) SetHopDestinations(v []*HopDestination) *JobTemplate {
13148	s.HopDestinations = v
13149	return s
13150}
13151
13152// SetLastUpdated sets the LastUpdated field's value.
13153func (s *JobTemplate) SetLastUpdated(v time.Time) *JobTemplate {
13154	s.LastUpdated = &v
13155	return s
13156}
13157
13158// SetName sets the Name field's value.
13159func (s *JobTemplate) SetName(v string) *JobTemplate {
13160	s.Name = &v
13161	return s
13162}
13163
13164// SetPriority sets the Priority field's value.
13165func (s *JobTemplate) SetPriority(v int64) *JobTemplate {
13166	s.Priority = &v
13167	return s
13168}
13169
13170// SetQueue sets the Queue field's value.
13171func (s *JobTemplate) SetQueue(v string) *JobTemplate {
13172	s.Queue = &v
13173	return s
13174}
13175
13176// SetSettings sets the Settings field's value.
13177func (s *JobTemplate) SetSettings(v *JobTemplateSettings) *JobTemplate {
13178	s.Settings = v
13179	return s
13180}
13181
13182// SetStatusUpdateInterval sets the StatusUpdateInterval field's value.
13183func (s *JobTemplate) SetStatusUpdateInterval(v string) *JobTemplate {
13184	s.StatusUpdateInterval = &v
13185	return s
13186}
13187
13188// SetType sets the Type field's value.
13189func (s *JobTemplate) SetType(v string) *JobTemplate {
13190	s.Type = &v
13191	return s
13192}
13193
13194// JobTemplateSettings contains all the transcode settings saved in the template
13195// that will be applied to jobs created from it.
13196type JobTemplateSettings struct {
13197	_ struct{} `type:"structure"`
13198
13199	// When specified, this offset (in milliseconds) is added to the input Ad Avail
13200	// PTS time.
13201	AdAvailOffset *int64 `locationName:"adAvailOffset" type:"integer"`
13202
13203	// Settings for ad avail blanking. Video can be blanked or overlaid with an
13204	// image, and audio muted during SCTE-35 triggered ad avails.
13205	AvailBlanking *AvailBlanking `locationName:"availBlanking" type:"structure"`
13206
13207	// Settings for Event Signaling And Messaging (ESAM).
13208	Esam *EsamSettings `locationName:"esam" type:"structure"`
13209
13210	// Use Inputs (inputs) to define the source file used in the transcode job.
13211	// There can only be one input in a job template. Using the API, you can include
13212	// multiple inputs when referencing a job template.
13213	Inputs []*InputTemplate `locationName:"inputs" type:"list"`
13214
13215	// Overlay motion graphics on top of your video. The motion graphics that you
13216	// specify here appear on all outputs in all output groups.
13217	MotionImageInserter *MotionImageInserter `locationName:"motionImageInserter" type:"structure"`
13218
13219	// Settings for your Nielsen configuration. If you don't do Nielsen measurement
13220	// and analytics, ignore these settings. When you enable Nielsen configuration
13221	// (nielsenConfiguration), MediaConvert enables PCM to ID3 tagging for all outputs
13222	// in the job. To enable Nielsen configuration programmatically, include an
13223	// instance of nielsenConfiguration in your JSON job specification. Even if
13224	// you don't include any children of nielsenConfiguration, you still enable
13225	// the setting.
13226	NielsenConfiguration *NielsenConfiguration `locationName:"nielsenConfiguration" type:"structure"`
13227
13228	// Ignore these settings unless you are using Nielsen non-linear watermarking.
13229	// Specify the values that MediaConvert uses to generate and place Nielsen watermarks
13230	// in your output audio. In addition to specifying these values, you also need
13231	// to set up your cloud TIC server. These settings apply to every output in
	// your job. The MediaConvert implementation is currently compatible with the
	// following Nielsen versions: Nielsen Watermark SDK Version 5.2.1, Nielsen NLM
	// Watermark Engine Version 1.2.7, and Nielsen Watermark Authenticator [SID_TIC]
	// Version [5.0.0].
13235	NielsenNonLinearWatermark *NielsenNonLinearWatermarkSettings `locationName:"nielsenNonLinearWatermark" type:"structure"`
13236
13237	// (OutputGroups) contains one group of settings for each set of outputs that
13238	// share a common package type. All unpackaged files (MPEG-4, MPEG-2 TS, Quicktime,
13239	// MXF, and no container) are grouped in a single output group as well. Required
13240	// in (OutputGroups) is a group of settings that apply to the whole group. This
13241	// required object depends on the value you set for (Type) under (OutputGroups)>(OutputGroupSettings).
13242	// Type, settings object pairs are as follows. * FILE_GROUP_SETTINGS, FileGroupSettings
13243	// * HLS_GROUP_SETTINGS, HlsGroupSettings * DASH_ISO_GROUP_SETTINGS, DashIsoGroupSettings
13244	// * MS_SMOOTH_GROUP_SETTINGS, MsSmoothGroupSettings * CMAF_GROUP_SETTINGS,
13245	// CmafGroupSettings
13246	OutputGroups []*OutputGroup `locationName:"outputGroups" type:"list"`
13247
13248	// Contains settings used to acquire and adjust timecode information from inputs.
13249	TimecodeConfig *TimecodeConfig `locationName:"timecodeConfig" type:"structure"`
13250
13251	// Enable Timed metadata insertion (TimedMetadataInsertion) to include ID3 tags
13252	// in any HLS outputs. To include timed metadata, you must enable it here, enable
13253	// it in each output container, and specify tags and timecodes in ID3 insertion
13254	// (Id3Insertion) objects.
13255	TimedMetadataInsertion *TimedMetadataInsertion `locationName:"timedMetadataInsertion" type:"structure"`
13256}
13257
13258// String returns the string representation
13259func (s JobTemplateSettings) String() string {
13260	return awsutil.Prettify(s)
13261}
13262
13263// GoString returns the string representation
13264func (s JobTemplateSettings) GoString() string {
13265	return s.String()
13266}
13267
13268// Validate inspects the fields of the type to determine if they are valid.
13269func (s *JobTemplateSettings) Validate() error {
13270	invalidParams := request.ErrInvalidParams{Context: "JobTemplateSettings"}
13271	if s.AdAvailOffset != nil && *s.AdAvailOffset < -1000 {
13272		invalidParams.Add(request.NewErrParamMinValue("AdAvailOffset", -1000))
13273	}
13274	if s.AvailBlanking != nil {
13275		if err := s.AvailBlanking.Validate(); err != nil {
13276			invalidParams.AddNested("AvailBlanking", err.(request.ErrInvalidParams))
13277		}
13278	}
13279	if s.Inputs != nil {
13280		for i, v := range s.Inputs {
13281			if v == nil {
13282				continue
13283			}
13284			if err := v.Validate(); err != nil {
13285				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Inputs", i), err.(request.ErrInvalidParams))
13286			}
13287		}
13288	}
13289	if s.MotionImageInserter != nil {
13290		if err := s.MotionImageInserter.Validate(); err != nil {
13291			invalidParams.AddNested("MotionImageInserter", err.(request.ErrInvalidParams))
13292		}
13293	}
13294	if s.NielsenNonLinearWatermark != nil {
13295		if err := s.NielsenNonLinearWatermark.Validate(); err != nil {
13296			invalidParams.AddNested("NielsenNonLinearWatermark", err.(request.ErrInvalidParams))
13297		}
13298	}
13299	if s.OutputGroups != nil {
13300		for i, v := range s.OutputGroups {
13301			if v == nil {
13302				continue
13303			}
13304			if err := v.Validate(); err != nil {
13305				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "OutputGroups", i), err.(request.ErrInvalidParams))
13306			}
13307		}
13308	}
13309
13310	if invalidParams.Len() > 0 {
13311		return invalidParams
13312	}
13313	return nil
13314}
13315
13316// SetAdAvailOffset sets the AdAvailOffset field's value.
13317func (s *JobTemplateSettings) SetAdAvailOffset(v int64) *JobTemplateSettings {
13318	s.AdAvailOffset = &v
13319	return s
13320}
13321
13322// SetAvailBlanking sets the AvailBlanking field's value.
13323func (s *JobTemplateSettings) SetAvailBlanking(v *AvailBlanking) *JobTemplateSettings {
13324	s.AvailBlanking = v
13325	return s
13326}
13327
13328// SetEsam sets the Esam field's value.
13329func (s *JobTemplateSettings) SetEsam(v *EsamSettings) *JobTemplateSettings {
13330	s.Esam = v
13331	return s
13332}
13333
13334// SetInputs sets the Inputs field's value.
13335func (s *JobTemplateSettings) SetInputs(v []*InputTemplate) *JobTemplateSettings {
13336	s.Inputs = v
13337	return s
13338}
13339
13340// SetMotionImageInserter sets the MotionImageInserter field's value.
13341func (s *JobTemplateSettings) SetMotionImageInserter(v *MotionImageInserter) *JobTemplateSettings {
13342	s.MotionImageInserter = v
13343	return s
13344}
13345
13346// SetNielsenConfiguration sets the NielsenConfiguration field's value.
13347func (s *JobTemplateSettings) SetNielsenConfiguration(v *NielsenConfiguration) *JobTemplateSettings {
13348	s.NielsenConfiguration = v
13349	return s
13350}
13351
13352// SetNielsenNonLinearWatermark sets the NielsenNonLinearWatermark field's value.
13353func (s *JobTemplateSettings) SetNielsenNonLinearWatermark(v *NielsenNonLinearWatermarkSettings) *JobTemplateSettings {
13354	s.NielsenNonLinearWatermark = v
13355	return s
13356}
13357
13358// SetOutputGroups sets the OutputGroups field's value.
13359func (s *JobTemplateSettings) SetOutputGroups(v []*OutputGroup) *JobTemplateSettings {
13360	s.OutputGroups = v
13361	return s
13362}
13363
13364// SetTimecodeConfig sets the TimecodeConfig field's value.
13365func (s *JobTemplateSettings) SetTimecodeConfig(v *TimecodeConfig) *JobTemplateSettings {
13366	s.TimecodeConfig = v
13367	return s
13368}
13369
13370// SetTimedMetadataInsertion sets the TimedMetadataInsertion field's value.
13371func (s *JobTemplateSettings) SetTimedMetadataInsertion(v *TimedMetadataInsertion) *JobTemplateSettings {
13372	s.TimedMetadataInsertion = v
13373	return s
13374}
13375
13376// You can send list job templates requests with an empty body. Optionally,
13377// you can filter the response by category by specifying it in your request
13378// body. You can also optionally specify the maximum number, up to twenty, of
13379// job templates to be returned.
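//
// A minimal usage sketch (illustrative only; svc is an assumed, already-configured
// *MediaConvert client and the category name is a placeholder):
//
//    params := &ListJobTemplatesInput{}
//    params.SetCategory("MY_CATEGORY").SetMaxResults(10)
//
//    resp, err := svc.ListJobTemplates(params)
//    if err == nil {
//        for _, tmpl := range resp.JobTemplates {
//            fmt.Println(aws.StringValue(tmpl.Name))
//        }
//    }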
13380type ListJobTemplatesInput struct {
13381	_ struct{} `type:"structure"`
13382
13383	// Optionally, specify a job template category to limit responses to only job
13384	// templates from that category.
13385	Category *string `location:"querystring" locationName:"category" type:"string"`
13386
13387	// Optional. When you request a list of job templates, you can choose to list
13388	// them alphabetically by NAME or chronologically by CREATION_DATE. If you don't
13389	// specify, the service will list them by name.
13390	ListBy *string `location:"querystring" locationName:"listBy" type:"string" enum:"JobTemplateListBy"`
13391
13392	// Optional. Number of job templates, up to twenty, that will be returned at
13393	// one time.
13394	MaxResults *int64 `location:"querystring" locationName:"maxResults" min:"1" type:"integer"`
13395
13396	// Use this string, provided with the response to a previous request, to request
13397	// the next batch of job templates.
13398	NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
13399
13400	// Optional. When you request lists of resources, you can specify whether they
13401	// are sorted in ASCENDING or DESCENDING order. Default varies by resource.
13402	Order *string `location:"querystring" locationName:"order" type:"string" enum:"Order"`
13403}
13404
13405// String returns the string representation
13406func (s ListJobTemplatesInput) String() string {
13407	return awsutil.Prettify(s)
13408}
13409
13410// GoString returns the string representation
13411func (s ListJobTemplatesInput) GoString() string {
13412	return s.String()
13413}
13414
13415// Validate inspects the fields of the type to determine if they are valid.
13416func (s *ListJobTemplatesInput) Validate() error {
13417	invalidParams := request.ErrInvalidParams{Context: "ListJobTemplatesInput"}
13418	if s.MaxResults != nil && *s.MaxResults < 1 {
13419		invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1))
13420	}
13421
13422	if invalidParams.Len() > 0 {
13423		return invalidParams
13424	}
13425	return nil
13426}
13427
13428// SetCategory sets the Category field's value.
13429func (s *ListJobTemplatesInput) SetCategory(v string) *ListJobTemplatesInput {
13430	s.Category = &v
13431	return s
13432}
13433
13434// SetListBy sets the ListBy field's value.
13435func (s *ListJobTemplatesInput) SetListBy(v string) *ListJobTemplatesInput {
13436	s.ListBy = &v
13437	return s
13438}
13439
13440// SetMaxResults sets the MaxResults field's value.
13441func (s *ListJobTemplatesInput) SetMaxResults(v int64) *ListJobTemplatesInput {
13442	s.MaxResults = &v
13443	return s
13444}
13445
13446// SetNextToken sets the NextToken field's value.
13447func (s *ListJobTemplatesInput) SetNextToken(v string) *ListJobTemplatesInput {
13448	s.NextToken = &v
13449	return s
13450}
13451
13452// SetOrder sets the Order field's value.
13453func (s *ListJobTemplatesInput) SetOrder(v string) *ListJobTemplatesInput {
13454	s.Order = &v
13455	return s
13456}
13457
13458// Successful list job templates requests return a JSON array of job templates.
13459// If you don't specify how they are ordered, you will receive them in alphabetical
13460// order by name.
13461type ListJobTemplatesOutput struct {
13462	_ struct{} `type:"structure"`
13463
13464	// List of Job templates.
13465	JobTemplates []*JobTemplate `locationName:"jobTemplates" type:"list"`
13466
13467	// Use this string to request the next batch of job templates.
13468	NextToken *string `locationName:"nextToken" type:"string"`
13469}
13470
13471// String returns the string representation
13472func (s ListJobTemplatesOutput) String() string {
13473	return awsutil.Prettify(s)
13474}
13475
13476// GoString returns the string representation
13477func (s ListJobTemplatesOutput) GoString() string {
13478	return s.String()
13479}
13480
13481// SetJobTemplates sets the JobTemplates field's value.
13482func (s *ListJobTemplatesOutput) SetJobTemplates(v []*JobTemplate) *ListJobTemplatesOutput {
13483	s.JobTemplates = v
13484	return s
13485}
13486
13487// SetNextToken sets the NextToken field's value.
13488func (s *ListJobTemplatesOutput) SetNextToken(v string) *ListJobTemplatesOutput {
13489	s.NextToken = &v
13490	return s
13491}
13492
13493// You can send list jobs requests with an empty body. Optionally, you can filter
13494// the response by queue and/or job status by specifying them in your request
13495// body. You can also optionally specify the maximum number, up to twenty, of
13496// jobs to be returned.
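//
// A hedged sketch of paging through results by hand with NextToken (svc is an
// assumed, preconfigured *MediaConvert client; the queue name is a placeholder):
//
//    params := &ListJobsInput{}
//    params.SetQueue("Default").SetStatus("COMPLETE").SetMaxResults(20)
//
//    for {
//        resp, err := svc.ListJobs(params)
//        if err != nil {
//            break // handle the error in real code
//        }
//        fmt.Println("jobs in this batch:", len(resp.Jobs))
//        if resp.NextToken == nil {
//            break // no more batches
//        }
//        params.SetNextToken(aws.StringValue(resp.NextToken))
//    }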
13497type ListJobsInput struct {
13498	_ struct{} `type:"structure"`
13499
13500	// Optional. Number of jobs, up to twenty, that will be returned at one time.
13501	MaxResults *int64 `location:"querystring" locationName:"maxResults" min:"1" type:"integer"`
13502
13503	// Optional. Use this string, provided with the response to a previous request,
13504	// to request the next batch of jobs.
13505	NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
13506
13507	// Optional. When you request lists of resources, you can specify whether they
13508	// are sorted in ASCENDING or DESCENDING order. Default varies by resource.
13509	Order *string `location:"querystring" locationName:"order" type:"string" enum:"Order"`
13510
13511	// Optional. Provide a queue name to get back only jobs from that queue.
13512	Queue *string `location:"querystring" locationName:"queue" type:"string"`
13513
13514	// Optional. A job's status can be SUBMITTED, PROGRESSING, COMPLETE, CANCELED,
13515	// or ERROR.
13516	Status *string `location:"querystring" locationName:"status" type:"string" enum:"JobStatus"`
13517}
13518
13519// String returns the string representation
13520func (s ListJobsInput) String() string {
13521	return awsutil.Prettify(s)
13522}
13523
13524// GoString returns the string representation
13525func (s ListJobsInput) GoString() string {
13526	return s.String()
13527}
13528
13529// Validate inspects the fields of the type to determine if they are valid.
13530func (s *ListJobsInput) Validate() error {
13531	invalidParams := request.ErrInvalidParams{Context: "ListJobsInput"}
13532	if s.MaxResults != nil && *s.MaxResults < 1 {
13533		invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1))
13534	}
13535
13536	if invalidParams.Len() > 0 {
13537		return invalidParams
13538	}
13539	return nil
13540}
13541
13542// SetMaxResults sets the MaxResults field's value.
13543func (s *ListJobsInput) SetMaxResults(v int64) *ListJobsInput {
13544	s.MaxResults = &v
13545	return s
13546}
13547
13548// SetNextToken sets the NextToken field's value.
13549func (s *ListJobsInput) SetNextToken(v string) *ListJobsInput {
13550	s.NextToken = &v
13551	return s
13552}
13553
13554// SetOrder sets the Order field's value.
13555func (s *ListJobsInput) SetOrder(v string) *ListJobsInput {
13556	s.Order = &v
13557	return s
13558}
13559
13560// SetQueue sets the Queue field's value.
13561func (s *ListJobsInput) SetQueue(v string) *ListJobsInput {
13562	s.Queue = &v
13563	return s
13564}
13565
13566// SetStatus sets the Status field's value.
13567func (s *ListJobsInput) SetStatus(v string) *ListJobsInput {
13568	s.Status = &v
13569	return s
13570}
13571
13572// Successful list jobs requests return a JSON array of jobs. If you don't specify
// how they are ordered, you will receive them with the most recently created first.
13574type ListJobsOutput struct {
13575	_ struct{} `type:"structure"`
13576
13577	// List of jobs
13578	Jobs []*Job `locationName:"jobs" type:"list"`
13579
13580	// Use this string to request the next batch of jobs.
13581	NextToken *string `locationName:"nextToken" type:"string"`
13582}
13583
13584// String returns the string representation
13585func (s ListJobsOutput) String() string {
13586	return awsutil.Prettify(s)
13587}
13588
13589// GoString returns the string representation
13590func (s ListJobsOutput) GoString() string {
13591	return s.String()
13592}
13593
13594// SetJobs sets the Jobs field's value.
13595func (s *ListJobsOutput) SetJobs(v []*Job) *ListJobsOutput {
13596	s.Jobs = v
13597	return s
13598}
13599
13600// SetNextToken sets the NextToken field's value.
13601func (s *ListJobsOutput) SetNextToken(v string) *ListJobsOutput {
13602	s.NextToken = &v
13603	return s
13604}
13605
13606// You can send list presets requests with an empty body. Optionally, you can
13607// filter the response by category by specifying it in your request body. You
// can also optionally specify the maximum number, up to twenty, of presets to
// be returned.
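//
// Purely as a sketch, the same kind of request can also be issued with a context
// through the *WithContext variant (svc and the values shown are assumed
// placeholders):
//
//    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
//    defer cancel()
//
//    params := &ListPresetsInput{}
//    params.SetCategory("HLS").SetListBy("CREATION_DATE").SetOrder("DESCENDING")
//
//    resp, err := svc.ListPresetsWithContext(ctx, params)
//    if err == nil {
//        fmt.Println(len(resp.Presets), "presets returned")
//    }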
13610type ListPresetsInput struct {
13611	_ struct{} `type:"structure"`
13612
13613	// Optionally, specify a preset category to limit responses to only presets
13614	// from that category.
13615	Category *string `location:"querystring" locationName:"category" type:"string"`
13616
13617	// Optional. When you request a list of presets, you can choose to list them
13618	// alphabetically by NAME or chronologically by CREATION_DATE. If you don't
13619	// specify, the service will list them by name.
13620	ListBy *string `location:"querystring" locationName:"listBy" type:"string" enum:"PresetListBy"`
13621
	// Optional. Number of presets, up to twenty, that will be returned at one time.
13623	MaxResults *int64 `location:"querystring" locationName:"maxResults" min:"1" type:"integer"`
13624
13625	// Use this string, provided with the response to a previous request, to request
13626	// the next batch of presets.
13627	NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
13628
13629	// Optional. When you request lists of resources, you can specify whether they
13630	// are sorted in ASCENDING or DESCENDING order. Default varies by resource.
13631	Order *string `location:"querystring" locationName:"order" type:"string" enum:"Order"`
13632}
13633
13634// String returns the string representation
13635func (s ListPresetsInput) String() string {
13636	return awsutil.Prettify(s)
13637}
13638
13639// GoString returns the string representation
13640func (s ListPresetsInput) GoString() string {
13641	return s.String()
13642}
13643
13644// Validate inspects the fields of the type to determine if they are valid.
13645func (s *ListPresetsInput) Validate() error {
13646	invalidParams := request.ErrInvalidParams{Context: "ListPresetsInput"}
13647	if s.MaxResults != nil && *s.MaxResults < 1 {
13648		invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1))
13649	}
13650
13651	if invalidParams.Len() > 0 {
13652		return invalidParams
13653	}
13654	return nil
13655}
13656
13657// SetCategory sets the Category field's value.
13658func (s *ListPresetsInput) SetCategory(v string) *ListPresetsInput {
13659	s.Category = &v
13660	return s
13661}
13662
13663// SetListBy sets the ListBy field's value.
13664func (s *ListPresetsInput) SetListBy(v string) *ListPresetsInput {
13665	s.ListBy = &v
13666	return s
13667}
13668
13669// SetMaxResults sets the MaxResults field's value.
13670func (s *ListPresetsInput) SetMaxResults(v int64) *ListPresetsInput {
13671	s.MaxResults = &v
13672	return s
13673}
13674
13675// SetNextToken sets the NextToken field's value.
13676func (s *ListPresetsInput) SetNextToken(v string) *ListPresetsInput {
13677	s.NextToken = &v
13678	return s
13679}
13680
13681// SetOrder sets the Order field's value.
13682func (s *ListPresetsInput) SetOrder(v string) *ListPresetsInput {
13683	s.Order = &v
13684	return s
13685}
13686
13687// Successful list presets requests return a JSON array of presets. If you don't
13688// specify how they are ordered, you will receive them alphabetically by name.
13689type ListPresetsOutput struct {
13690	_ struct{} `type:"structure"`
13691
13692	// Use this string to request the next batch of presets.
13693	NextToken *string `locationName:"nextToken" type:"string"`
13694
13695	// List of presets
13696	Presets []*Preset `locationName:"presets" type:"list"`
13697}
13698
13699// String returns the string representation
13700func (s ListPresetsOutput) String() string {
13701	return awsutil.Prettify(s)
13702}
13703
13704// GoString returns the string representation
13705func (s ListPresetsOutput) GoString() string {
13706	return s.String()
13707}
13708
13709// SetNextToken sets the NextToken field's value.
13710func (s *ListPresetsOutput) SetNextToken(v string) *ListPresetsOutput {
13711	s.NextToken = &v
13712	return s
13713}
13714
13715// SetPresets sets the Presets field's value.
13716func (s *ListPresetsOutput) SetPresets(v []*Preset) *ListPresetsOutput {
13717	s.Presets = v
13718	return s
13719}
13720
13721// You can send list queues requests with an empty body. You can optionally
13722// specify the maximum number, up to twenty, of queues to be returned.
13723type ListQueuesInput struct {
13724	_ struct{} `type:"structure"`
13725
13726	// Optional. When you request a list of queues, you can choose to list them
13727	// alphabetically by NAME or chronologically by CREATION_DATE. If you don't
13728	// specify, the service will list them by creation date.
13729	ListBy *string `location:"querystring" locationName:"listBy" type:"string" enum:"QueueListBy"`
13730
13731	// Optional. Number of queues, up to twenty, that will be returned at one time.
13732	MaxResults *int64 `location:"querystring" locationName:"maxResults" min:"1" type:"integer"`
13733
13734	// Use this string, provided with the response to a previous request, to request
13735	// the next batch of queues.
13736	NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
13737
13738	// Optional. When you request lists of resources, you can specify whether they
13739	// are sorted in ASCENDING or DESCENDING order. Default varies by resource.
13740	Order *string `location:"querystring" locationName:"order" type:"string" enum:"Order"`
13741}
13742
13743// String returns the string representation
13744func (s ListQueuesInput) String() string {
13745	return awsutil.Prettify(s)
13746}
13747
13748// GoString returns the string representation
13749func (s ListQueuesInput) GoString() string {
13750	return s.String()
13751}
13752
13753// Validate inspects the fields of the type to determine if they are valid.
13754func (s *ListQueuesInput) Validate() error {
13755	invalidParams := request.ErrInvalidParams{Context: "ListQueuesInput"}
13756	if s.MaxResults != nil && *s.MaxResults < 1 {
13757		invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1))
13758	}
13759
13760	if invalidParams.Len() > 0 {
13761		return invalidParams
13762	}
13763	return nil
13764}
13765
13766// SetListBy sets the ListBy field's value.
13767func (s *ListQueuesInput) SetListBy(v string) *ListQueuesInput {
13768	s.ListBy = &v
13769	return s
13770}
13771
13772// SetMaxResults sets the MaxResults field's value.
13773func (s *ListQueuesInput) SetMaxResults(v int64) *ListQueuesInput {
13774	s.MaxResults = &v
13775	return s
13776}
13777
13778// SetNextToken sets the NextToken field's value.
13779func (s *ListQueuesInput) SetNextToken(v string) *ListQueuesInput {
13780	s.NextToken = &v
13781	return s
13782}
13783
13784// SetOrder sets the Order field's value.
13785func (s *ListQueuesInput) SetOrder(v string) *ListQueuesInput {
13786	s.Order = &v
13787	return s
13788}
13789
13790// Successful list queues requests return a JSON array of queues. If you don't
13791// specify how they are ordered, you will receive them alphabetically by name.
13792type ListQueuesOutput struct {
13793	_ struct{} `type:"structure"`
13794
13795	// Use this string to request the next batch of queues.
13796	NextToken *string `locationName:"nextToken" type:"string"`
13797
13798	// List of queues.
13799	Queues []*Queue `locationName:"queues" type:"list"`
13800}
13801
13802// String returns the string representation
13803func (s ListQueuesOutput) String() string {
13804	return awsutil.Prettify(s)
13805}
13806
13807// GoString returns the string representation
13808func (s ListQueuesOutput) GoString() string {
13809	return s.String()
13810}
13811
13812// SetNextToken sets the NextToken field's value.
13813func (s *ListQueuesOutput) SetNextToken(v string) *ListQueuesOutput {
13814	s.NextToken = &v
13815	return s
13816}
13817
13818// SetQueues sets the Queues field's value.
13819func (s *ListQueuesOutput) SetQueues(v []*Queue) *ListQueuesOutput {
13820	s.Queues = v
13821	return s
13822}
13823
13824// List the tags for your AWS Elemental MediaConvert resource by sending a request
13825// with the Amazon Resource Name (ARN) of the resource. To get the ARN, send
13826// a GET request with the resource name.
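//
// A minimal sketch (svc is an assumed, already-configured *MediaConvert client;
// the ARN is a fictional placeholder):
//
//    params := &ListTagsForResourceInput{}
//    params.SetArn("arn:aws:mediaconvert:us-west-2:123456789012:queues/Default")
//
//    resp, err := svc.ListTagsForResource(params)
//    if err == nil && resp.ResourceTags != nil {
//        for key, value := range resp.ResourceTags.Tags {
//            fmt.Println(key, "=", aws.StringValue(value))
//        }
//    }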
13827type ListTagsForResourceInput struct {
13828	_ struct{} `type:"structure"`
13829
13830	// The Amazon Resource Name (ARN) of the resource that you want to list tags
13831	// for. To get the ARN, send a GET request with the resource name.
13832	//
13833	// Arn is a required field
13834	Arn *string `location:"uri" locationName:"arn" type:"string" required:"true"`
13835}
13836
13837// String returns the string representation
13838func (s ListTagsForResourceInput) String() string {
13839	return awsutil.Prettify(s)
13840}
13841
13842// GoString returns the string representation
13843func (s ListTagsForResourceInput) GoString() string {
13844	return s.String()
13845}
13846
13847// Validate inspects the fields of the type to determine if they are valid.
13848func (s *ListTagsForResourceInput) Validate() error {
13849	invalidParams := request.ErrInvalidParams{Context: "ListTagsForResourceInput"}
13850	if s.Arn == nil {
13851		invalidParams.Add(request.NewErrParamRequired("Arn"))
13852	}
13853	if s.Arn != nil && len(*s.Arn) < 1 {
13854		invalidParams.Add(request.NewErrParamMinLen("Arn", 1))
13855	}
13856
13857	if invalidParams.Len() > 0 {
13858		return invalidParams
13859	}
13860	return nil
13861}
13862
13863// SetArn sets the Arn field's value.
13864func (s *ListTagsForResourceInput) SetArn(v string) *ListTagsForResourceInput {
13865	s.Arn = &v
13866	return s
13867}
13868
13869// A successful request to list the tags for a resource returns a JSON map of
13870// tags.
13871type ListTagsForResourceOutput struct {
13872	_ struct{} `type:"structure"`
13873
13874	// The Amazon Resource Name (ARN) and tags for an AWS Elemental MediaConvert
13875	// resource.
13876	ResourceTags *ResourceTags `locationName:"resourceTags" type:"structure"`
13877}
13878
13879// String returns the string representation
13880func (s ListTagsForResourceOutput) String() string {
13881	return awsutil.Prettify(s)
13882}
13883
13884// GoString returns the string representation
13885func (s ListTagsForResourceOutput) GoString() string {
13886	return s.String()
13887}
13888
13889// SetResourceTags sets the ResourceTags field's value.
13890func (s *ListTagsForResourceOutput) SetResourceTags(v *ResourceTags) *ListTagsForResourceOutput {
13891	s.ResourceTags = v
13892	return s
13893}
13894
13895// Settings for SCTE-35 signals from ESAM. Include this in your job settings
13896// to put SCTE-35 markers in your HLS and transport stream outputs at the insertion
13897// points that you specify in an ESAM XML document. Provide the document in
// the setting Signal processing notification XML (sccXml).
13899type M2tsScte35Esam struct {
13900	_ struct{} `type:"structure"`
13901
13902	// Packet Identifier (PID) of the SCTE-35 stream in the transport stream generated
13903	// by ESAM.
13904	Scte35EsamPid *int64 `locationName:"scte35EsamPid" min:"32" type:"integer"`
13905}
13906
13907// String returns the string representation
13908func (s M2tsScte35Esam) String() string {
13909	return awsutil.Prettify(s)
13910}
13911
13912// GoString returns the string representation
13913func (s M2tsScte35Esam) GoString() string {
13914	return s.String()
13915}
13916
13917// Validate inspects the fields of the type to determine if they are valid.
13918func (s *M2tsScte35Esam) Validate() error {
13919	invalidParams := request.ErrInvalidParams{Context: "M2tsScte35Esam"}
13920	if s.Scte35EsamPid != nil && *s.Scte35EsamPid < 32 {
13921		invalidParams.Add(request.NewErrParamMinValue("Scte35EsamPid", 32))
13922	}
13923
13924	if invalidParams.Len() > 0 {
13925		return invalidParams
13926	}
13927	return nil
13928}
13929
13930// SetScte35EsamPid sets the Scte35EsamPid field's value.
13931func (s *M2tsScte35Esam) SetScte35EsamPid(v int64) *M2tsScte35Esam {
13932	s.Scte35EsamPid = &v
13933	return s
13934}
13935
13936// MPEG-2 TS container settings. These apply to outputs in a File output group
13937// when the output's container (ContainerType) is MPEG-2 Transport Stream (M2TS).
13938// In these assets, data is organized by the program map table (PMT). Each transport
13939// stream program contains subsets of data, including audio, video, and metadata.
13940// Each of these subsets of data has a numerical label called a packet identifier
13941// (PID). Each transport stream program corresponds to one MediaConvert output.
13942// The PMT lists the types of data in a program along with their PID. Downstream
13943// systems and players use the program map table to look up the PID for each
// type of data they access and then use the PIDs to locate specific data within
// the asset.
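//
// A hedged configuration sketch (the PIDs and bitrate are illustrative values,
// not recommendations), showing the fluent setters and the nested ESAM SCTE-35
// settings; Validate catches out-of-range values such as PIDs below 32 before
// any request is sent:
//
//    m2ts := &M2tsSettings{}
//    m2ts.SetBitrate(7500000).
//        SetRateMode("CBR").
//        SetVideoPid(481).
//        SetPcrPid(481).
//        SetScte35Source("NONE"). // markers come from an ESAM document instead
//        SetScte35Esam(&M2tsScte35Esam{Scte35EsamPid: aws.Int64(508)})
//
//    if err := m2ts.Validate(); err != nil {
//        fmt.Println("invalid M2TS settings:", err)
//    }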
13946type M2tsSettings struct {
13947	_ struct{} `type:"structure"`
13948
13949	// Selects between the DVB and ATSC buffer models for Dolby Digital audio.
13950	AudioBufferModel *string `locationName:"audioBufferModel" type:"string" enum:"M2tsAudioBufferModel"`
13951
13952	// Specify this setting only when your output will be consumed by a downstream
13953	// repackaging workflow that is sensitive to very small duration differences
13954	// between video and audio. For this situation, choose Match video duration
13955	// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
13956	// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
13957	// MediaConvert pads the output audio streams with silence or trims them to
13958	// ensure that the total duration of each audio stream is at least as long as
13959	// the total duration of the video stream. After padding or trimming, the audio
13960	// stream duration is no more than one frame longer than the video stream. MediaConvert
13961	// applies audio padding or trimming only to the end of the last segment of
13962	// the output. For unsegmented outputs, MediaConvert adds padding only to the
13963	// end of the file. When you keep the default value, any minor discrepancies
13964	// between audio and video duration will depend on your output audio codec.
13965	AudioDuration *string `locationName:"audioDuration" type:"string" enum:"M2tsAudioDuration"`
13966
13967	// The number of audio frames to insert for each PES packet.
13968	AudioFramesPerPes *int64 `locationName:"audioFramesPerPes" type:"integer"`
13969
13970	// Specify the packet identifiers (PIDs) for any elementary audio streams you
13971	// include in this output. Specify multiple PIDs as a JSON array. Default is
13972	// the range 482-492.
13973	AudioPids []*int64 `locationName:"audioPids" type:"list"`
13974
13975	// Specify the output bitrate of the transport stream in bits per second. Setting
13976	// to 0 lets the muxer automatically determine the appropriate bitrate. Other
13977	// common values are 3750000, 7500000, and 15000000.
13978	Bitrate *int64 `locationName:"bitrate" type:"integer"`
13979
13980	// Controls what buffer model to use for accurate interleaving. If set to MULTIPLEX,
13981	// use multiplex buffer model. If set to NONE, this can lead to lower latency,
13982	// but low-memory devices may not be able to play back the stream without interruptions.
13983	BufferModel *string `locationName:"bufferModel" type:"string" enum:"M2tsBufferModel"`
13984
13985	// Inserts DVB Network Information Table (NIT) at the specified table repetition
13986	// interval.
13987	DvbNitSettings *DvbNitSettings `locationName:"dvbNitSettings" type:"structure"`
13988
	// Inserts DVB Service Description Table (SDT) at the specified table repetition
	// interval.
13991	DvbSdtSettings *DvbSdtSettings `locationName:"dvbSdtSettings" type:"structure"`
13992
13993	// Specify the packet identifiers (PIDs) for DVB subtitle data included in this
13994	// output. Specify multiple PIDs as a JSON array. Default is the range 460-479.
13995	DvbSubPids []*int64 `locationName:"dvbSubPids" type:"list"`
13996
13997	// Inserts DVB Time and Date Table (TDT) at the specified table repetition interval.
13998	DvbTdtSettings *DvbTdtSettings `locationName:"dvbTdtSettings" type:"structure"`
13999
14000	// Specify the packet identifier (PID) for DVB teletext data you include in
14001	// this output. Default is 499.
14002	DvbTeletextPid *int64 `locationName:"dvbTeletextPid" min:"32" type:"integer"`
14003
14004	// When set to VIDEO_AND_FIXED_INTERVALS, audio EBP markers will be added to
14005	// partitions 3 and 4. The interval between these additional markers will be
14006	// fixed, and will be slightly shorter than the video EBP marker interval. When
14007	// set to VIDEO_INTERVAL, these additional markers will not be inserted. Only
	// applicable when EBP segmentation markers are selected (segmentationMarkers
14009	// is EBP or EBP_LEGACY).
14010	EbpAudioInterval *string `locationName:"ebpAudioInterval" type:"string" enum:"M2tsEbpAudioInterval"`
14011
14012	// Selects which PIDs to place EBP markers on. They can either be placed only
14013	// on the video PID, or on both the video PID and all audio PIDs. Only applicable
	// when EBP segmentation markers are selected (segmentationMarkers is EBP
14015	// or EBP_LEGACY).
14016	EbpPlacement *string `locationName:"ebpPlacement" type:"string" enum:"M2tsEbpPlacement"`
14017
14018	// Controls whether to include the ES Rate field in the PES header.
14019	EsRateInPes *string `locationName:"esRateInPes" type:"string" enum:"M2tsEsRateInPes"`
14020
14021	// Keep the default value (DEFAULT) unless you know that your audio EBP markers
14022	// are incorrectly appearing before your video EBP markers. To correct this
14023	// problem, set this value to Force (FORCE).
14024	ForceTsVideoEbpOrder *string `locationName:"forceTsVideoEbpOrder" type:"string" enum:"M2tsForceTsVideoEbpOrder"`
14025
14026	// The length, in seconds, of each fragment. Only used with EBP markers.
14027	FragmentTime *float64 `locationName:"fragmentTime" type:"double"`
14028
14029	// Specify the maximum time, in milliseconds, between Program Clock References
14030	// (PCRs) inserted into the transport stream.
14031	MaxPcrInterval *int64 `locationName:"maxPcrInterval" type:"integer"`
14032
14033	// When set, enforces that Encoder Boundary Points do not come within the specified
14034	// time interval of each other by looking ahead at input video. If another EBP
14035	// is going to come in within the specified time interval, the current EBP is
14036	// not emitted, and the segment is "stretched" to the next marker. The lookahead
14037	// value does not add latency to the system. The Live Event must be configured
14038	// elsewhere to create sufficient latency to make the lookahead accurate.
14039	MinEbpInterval *int64 `locationName:"minEbpInterval" type:"integer"`
14040
14041	// If INSERT, Nielsen inaudible tones for media tracking will be detected in
14042	// the input audio and an equivalent ID3 tag will be inserted in the output.
14043	NielsenId3 *string `locationName:"nielsenId3" type:"string" enum:"M2tsNielsenId3"`
14044
14045	// Value in bits per second of extra null packets to insert into the transport
14046	// stream. This can be used if a downstream encryption system requires periodic
14047	// null packets.
14048	NullPacketBitrate *float64 `locationName:"nullPacketBitrate" type:"double"`
14049
14050	// The number of milliseconds between instances of this table in the output
14051	// transport stream.
14052	PatInterval *int64 `locationName:"patInterval" type:"integer"`
14053
14054	// When set to PCR_EVERY_PES_PACKET, a Program Clock Reference value is inserted
14055	// for every Packetized Elementary Stream (PES) header. This is effective only
14056	// when the PCR PID is the same as the video or audio elementary stream.
14057	PcrControl *string `locationName:"pcrControl" type:"string" enum:"M2tsPcrControl"`
14058
14059	// Specify the packet identifier (PID) for the program clock reference (PCR)
14060	// in this output. If you do not specify a value, the service will use the value
14061	// for Video PID (VideoPid).
14062	PcrPid *int64 `locationName:"pcrPid" min:"32" type:"integer"`
14063
14064	// Specify the number of milliseconds between instances of the program map table
14065	// (PMT) in the output transport stream.
14066	PmtInterval *int64 `locationName:"pmtInterval" type:"integer"`
14067
14068	// Specify the packet identifier (PID) for the program map table (PMT) itself.
14069	// Default is 480.
14070	PmtPid *int64 `locationName:"pmtPid" min:"32" type:"integer"`
14071
14072	// Specify the packet identifier (PID) of the private metadata stream. Default
14073	// is 503.
14074	PrivateMetadataPid *int64 `locationName:"privateMetadataPid" min:"32" type:"integer"`
14075
14076	// Use Program number (programNumber) to specify the program number used in
14077	// the program map table (PMT) for this output. Default is 1. Program numbers
14078	// and program map tables are parts of MPEG-2 transport stream containers, used
14079	// for organizing data.
14080	ProgramNumber *int64 `locationName:"programNumber" type:"integer"`
14081
14082	// When set to CBR, inserts null packets into transport stream to fill specified
14083	// bitrate. When set to VBR, the bitrate setting acts as the maximum bitrate,
14084	// but the output will not be padded up to that bitrate.
14085	RateMode *string `locationName:"rateMode" type:"string" enum:"M2tsRateMode"`
14086
14087	// Include this in your job settings to put SCTE-35 markers in your HLS and
14088	// transport stream outputs at the insertion points that you specify in an ESAM
	// XML document. Provide the document in the setting Signal processing notification
	// XML (sccXml).
14090	Scte35Esam *M2tsScte35Esam `locationName:"scte35Esam" type:"structure"`
14091
14092	// Specify the packet identifier (PID) of the SCTE-35 stream in the transport
14093	// stream.
14094	Scte35Pid *int64 `locationName:"scte35Pid" min:"32" type:"integer"`
14095
14096	// For SCTE-35 markers from your input-- Choose Passthrough (PASSTHROUGH) if
14097	// you want SCTE-35 markers that appear in your input to also appear in this
14098	// output. Choose None (NONE) if you don't want SCTE-35 markers in this output.
14099	// For SCTE-35 markers from an ESAM XML document-- Choose None (NONE). Also
14100	// provide the ESAM XML as a string in the setting Signal processing notification
14101	// XML (sccXml). Also enable ESAM SCTE-35 (include the property scte35Esam).
14102	Scte35Source *string `locationName:"scte35Source" type:"string" enum:"M2tsScte35Source"`
14103
14104	// Inserts segmentation markers at each segmentation_time period. rai_segstart
14105	// sets the Random Access Indicator bit in the adaptation field. rai_adapt sets
14106	// the RAI bit and adds the current timecode in the private data bytes. psi_segstart
14107	// inserts PAT and PMT tables at the start of segments. ebp adds Encoder Boundary
14108	// Point information to the adaptation field as per OpenCable specification
14109	// OC-SP-EBP-I01-130118. ebp_legacy adds Encoder Boundary Point information
14110	// to the adaptation field using a legacy proprietary format.
14111	SegmentationMarkers *string `locationName:"segmentationMarkers" type:"string" enum:"M2tsSegmentationMarkers"`
14112
14113	// The segmentation style parameter controls how segmentation markers are inserted
14114	// into the transport stream. With avails, it is possible that segments may
14115	// be truncated, which can influence where future segmentation markers are inserted.
14116	// When a segmentation style of "reset_cadence" is selected and a segment is
14117	// truncated due to an avail, we will reset the segmentation cadence. This means
	// the subsequent segment will have a duration of $segmentation_time seconds.
14119	// When a segmentation style of "maintain_cadence" is selected and a segment
14120	// is truncated due to an avail, we will not reset the segmentation cadence.
14121	// This means the subsequent segment will likely be truncated as well. However,
14122	// all segments after that will have a duration of $segmentation_time seconds.
14123	// Note that EBP lookahead is a slight exception to this rule.
14124	SegmentationStyle *string `locationName:"segmentationStyle" type:"string" enum:"M2tsSegmentationStyle"`
14125
	// Specify the length, in seconds, of each segment. Required unless segmentation
	// markers (segmentationMarkers) is set to _none_.
14128	SegmentationTime *float64 `locationName:"segmentationTime" type:"double"`
14129
14130	// Specify the packet identifier (PID) for timed metadata in this output. Default
14131	// is 502.
14132	TimedMetadataPid *int64 `locationName:"timedMetadataPid" min:"32" type:"integer"`
14133
14134	// Specify the ID for the transport stream itself in the program map table for
14135	// this output. Transport stream IDs and program map tables are parts of MPEG-2
14136	// transport stream containers, used for organizing data.
14137	TransportStreamId *int64 `locationName:"transportStreamId" type:"integer"`
14138
14139	// Specify the packet identifier (PID) of the elementary video stream in the
14140	// transport stream.
14141	VideoPid *int64 `locationName:"videoPid" min:"32" type:"integer"`
14142}
14143
14144// String returns the string representation
14145func (s M2tsSettings) String() string {
14146	return awsutil.Prettify(s)
14147}
14148
14149// GoString returns the string representation
14150func (s M2tsSettings) GoString() string {
14151	return s.String()
14152}
14153
14154// Validate inspects the fields of the type to determine if they are valid.
14155func (s *M2tsSettings) Validate() error {
14156	invalidParams := request.ErrInvalidParams{Context: "M2tsSettings"}
14157	if s.DvbTeletextPid != nil && *s.DvbTeletextPid < 32 {
14158		invalidParams.Add(request.NewErrParamMinValue("DvbTeletextPid", 32))
14159	}
14160	if s.PcrPid != nil && *s.PcrPid < 32 {
14161		invalidParams.Add(request.NewErrParamMinValue("PcrPid", 32))
14162	}
14163	if s.PmtPid != nil && *s.PmtPid < 32 {
14164		invalidParams.Add(request.NewErrParamMinValue("PmtPid", 32))
14165	}
14166	if s.PrivateMetadataPid != nil && *s.PrivateMetadataPid < 32 {
14167		invalidParams.Add(request.NewErrParamMinValue("PrivateMetadataPid", 32))
14168	}
14169	if s.Scte35Pid != nil && *s.Scte35Pid < 32 {
14170		invalidParams.Add(request.NewErrParamMinValue("Scte35Pid", 32))
14171	}
14172	if s.TimedMetadataPid != nil && *s.TimedMetadataPid < 32 {
14173		invalidParams.Add(request.NewErrParamMinValue("TimedMetadataPid", 32))
14174	}
14175	if s.VideoPid != nil && *s.VideoPid < 32 {
14176		invalidParams.Add(request.NewErrParamMinValue("VideoPid", 32))
14177	}
14178	if s.DvbNitSettings != nil {
14179		if err := s.DvbNitSettings.Validate(); err != nil {
14180			invalidParams.AddNested("DvbNitSettings", err.(request.ErrInvalidParams))
14181		}
14182	}
14183	if s.DvbSdtSettings != nil {
14184		if err := s.DvbSdtSettings.Validate(); err != nil {
14185			invalidParams.AddNested("DvbSdtSettings", err.(request.ErrInvalidParams))
14186		}
14187	}
14188	if s.DvbTdtSettings != nil {
14189		if err := s.DvbTdtSettings.Validate(); err != nil {
14190			invalidParams.AddNested("DvbTdtSettings", err.(request.ErrInvalidParams))
14191		}
14192	}
14193	if s.Scte35Esam != nil {
14194		if err := s.Scte35Esam.Validate(); err != nil {
14195			invalidParams.AddNested("Scte35Esam", err.(request.ErrInvalidParams))
14196		}
14197	}
14198
14199	if invalidParams.Len() > 0 {
14200		return invalidParams
14201	}
14202	return nil
14203}
14204
14205// SetAudioBufferModel sets the AudioBufferModel field's value.
14206func (s *M2tsSettings) SetAudioBufferModel(v string) *M2tsSettings {
14207	s.AudioBufferModel = &v
14208	return s
14209}
14210
14211// SetAudioDuration sets the AudioDuration field's value.
14212func (s *M2tsSettings) SetAudioDuration(v string) *M2tsSettings {
14213	s.AudioDuration = &v
14214	return s
14215}
14216
14217// SetAudioFramesPerPes sets the AudioFramesPerPes field's value.
14218func (s *M2tsSettings) SetAudioFramesPerPes(v int64) *M2tsSettings {
14219	s.AudioFramesPerPes = &v
14220	return s
14221}
14222
14223// SetAudioPids sets the AudioPids field's value.
14224func (s *M2tsSettings) SetAudioPids(v []*int64) *M2tsSettings {
14225	s.AudioPids = v
14226	return s
14227}
14228
14229// SetBitrate sets the Bitrate field's value.
14230func (s *M2tsSettings) SetBitrate(v int64) *M2tsSettings {
14231	s.Bitrate = &v
14232	return s
14233}
14234
14235// SetBufferModel sets the BufferModel field's value.
14236func (s *M2tsSettings) SetBufferModel(v string) *M2tsSettings {
14237	s.BufferModel = &v
14238	return s
14239}
14240
14241// SetDvbNitSettings sets the DvbNitSettings field's value.
14242func (s *M2tsSettings) SetDvbNitSettings(v *DvbNitSettings) *M2tsSettings {
14243	s.DvbNitSettings = v
14244	return s
14245}
14246
14247// SetDvbSdtSettings sets the DvbSdtSettings field's value.
14248func (s *M2tsSettings) SetDvbSdtSettings(v *DvbSdtSettings) *M2tsSettings {
14249	s.DvbSdtSettings = v
14250	return s
14251}
14252
14253// SetDvbSubPids sets the DvbSubPids field's value.
14254func (s *M2tsSettings) SetDvbSubPids(v []*int64) *M2tsSettings {
14255	s.DvbSubPids = v
14256	return s
14257}
14258
14259// SetDvbTdtSettings sets the DvbTdtSettings field's value.
14260func (s *M2tsSettings) SetDvbTdtSettings(v *DvbTdtSettings) *M2tsSettings {
14261	s.DvbTdtSettings = v
14262	return s
14263}
14264
14265// SetDvbTeletextPid sets the DvbTeletextPid field's value.
14266func (s *M2tsSettings) SetDvbTeletextPid(v int64) *M2tsSettings {
14267	s.DvbTeletextPid = &v
14268	return s
14269}
14270
14271// SetEbpAudioInterval sets the EbpAudioInterval field's value.
14272func (s *M2tsSettings) SetEbpAudioInterval(v string) *M2tsSettings {
14273	s.EbpAudioInterval = &v
14274	return s
14275}
14276
14277// SetEbpPlacement sets the EbpPlacement field's value.
14278func (s *M2tsSettings) SetEbpPlacement(v string) *M2tsSettings {
14279	s.EbpPlacement = &v
14280	return s
14281}
14282
14283// SetEsRateInPes sets the EsRateInPes field's value.
14284func (s *M2tsSettings) SetEsRateInPes(v string) *M2tsSettings {
14285	s.EsRateInPes = &v
14286	return s
14287}
14288
14289// SetForceTsVideoEbpOrder sets the ForceTsVideoEbpOrder field's value.
14290func (s *M2tsSettings) SetForceTsVideoEbpOrder(v string) *M2tsSettings {
14291	s.ForceTsVideoEbpOrder = &v
14292	return s
14293}
14294
14295// SetFragmentTime sets the FragmentTime field's value.
14296func (s *M2tsSettings) SetFragmentTime(v float64) *M2tsSettings {
14297	s.FragmentTime = &v
14298	return s
14299}
14300
14301// SetMaxPcrInterval sets the MaxPcrInterval field's value.
14302func (s *M2tsSettings) SetMaxPcrInterval(v int64) *M2tsSettings {
14303	s.MaxPcrInterval = &v
14304	return s
14305}
14306
14307// SetMinEbpInterval sets the MinEbpInterval field's value.
14308func (s *M2tsSettings) SetMinEbpInterval(v int64) *M2tsSettings {
14309	s.MinEbpInterval = &v
14310	return s
14311}
14312
14313// SetNielsenId3 sets the NielsenId3 field's value.
14314func (s *M2tsSettings) SetNielsenId3(v string) *M2tsSettings {
14315	s.NielsenId3 = &v
14316	return s
14317}
14318
14319// SetNullPacketBitrate sets the NullPacketBitrate field's value.
14320func (s *M2tsSettings) SetNullPacketBitrate(v float64) *M2tsSettings {
14321	s.NullPacketBitrate = &v
14322	return s
14323}
14324
14325// SetPatInterval sets the PatInterval field's value.
14326func (s *M2tsSettings) SetPatInterval(v int64) *M2tsSettings {
14327	s.PatInterval = &v
14328	return s
14329}
14330
14331// SetPcrControl sets the PcrControl field's value.
14332func (s *M2tsSettings) SetPcrControl(v string) *M2tsSettings {
14333	s.PcrControl = &v
14334	return s
14335}
14336
14337// SetPcrPid sets the PcrPid field's value.
14338func (s *M2tsSettings) SetPcrPid(v int64) *M2tsSettings {
14339	s.PcrPid = &v
14340	return s
14341}
14342
14343// SetPmtInterval sets the PmtInterval field's value.
14344func (s *M2tsSettings) SetPmtInterval(v int64) *M2tsSettings {
14345	s.PmtInterval = &v
14346	return s
14347}
14348
14349// SetPmtPid sets the PmtPid field's value.
14350func (s *M2tsSettings) SetPmtPid(v int64) *M2tsSettings {
14351	s.PmtPid = &v
14352	return s
14353}
14354
14355// SetPrivateMetadataPid sets the PrivateMetadataPid field's value.
14356func (s *M2tsSettings) SetPrivateMetadataPid(v int64) *M2tsSettings {
14357	s.PrivateMetadataPid = &v
14358	return s
14359}
14360
14361// SetProgramNumber sets the ProgramNumber field's value.
14362func (s *M2tsSettings) SetProgramNumber(v int64) *M2tsSettings {
14363	s.ProgramNumber = &v
14364	return s
14365}
14366
14367// SetRateMode sets the RateMode field's value.
14368func (s *M2tsSettings) SetRateMode(v string) *M2tsSettings {
14369	s.RateMode = &v
14370	return s
14371}
14372
14373// SetScte35Esam sets the Scte35Esam field's value.
14374func (s *M2tsSettings) SetScte35Esam(v *M2tsScte35Esam) *M2tsSettings {
14375	s.Scte35Esam = v
14376	return s
14377}
14378
14379// SetScte35Pid sets the Scte35Pid field's value.
14380func (s *M2tsSettings) SetScte35Pid(v int64) *M2tsSettings {
14381	s.Scte35Pid = &v
14382	return s
14383}
14384
14385// SetScte35Source sets the Scte35Source field's value.
14386func (s *M2tsSettings) SetScte35Source(v string) *M2tsSettings {
14387	s.Scte35Source = &v
14388	return s
14389}
14390
14391// SetSegmentationMarkers sets the SegmentationMarkers field's value.
14392func (s *M2tsSettings) SetSegmentationMarkers(v string) *M2tsSettings {
14393	s.SegmentationMarkers = &v
14394	return s
14395}
14396
14397// SetSegmentationStyle sets the SegmentationStyle field's value.
14398func (s *M2tsSettings) SetSegmentationStyle(v string) *M2tsSettings {
14399	s.SegmentationStyle = &v
14400	return s
14401}
14402
14403// SetSegmentationTime sets the SegmentationTime field's value.
14404func (s *M2tsSettings) SetSegmentationTime(v float64) *M2tsSettings {
14405	s.SegmentationTime = &v
14406	return s
14407}
14408
14409// SetTimedMetadataPid sets the TimedMetadataPid field's value.
14410func (s *M2tsSettings) SetTimedMetadataPid(v int64) *M2tsSettings {
14411	s.TimedMetadataPid = &v
14412	return s
14413}
14414
14415// SetTransportStreamId sets the TransportStreamId field's value.
14416func (s *M2tsSettings) SetTransportStreamId(v int64) *M2tsSettings {
14417	s.TransportStreamId = &v
14418	return s
14419}
14420
14421// SetVideoPid sets the VideoPid field's value.
14422func (s *M2tsSettings) SetVideoPid(v int64) *M2tsSettings {
14423	s.VideoPid = &v
14424	return s
14425}
14426
14427// Settings for TS segments in HLS
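//
// A small illustrative sketch of the client-side validation behavior: PIDs below
// the documented minimum of 32 are rejected before any request is sent (the
// values here are placeholders):
//
//    m3u8 := &M3u8Settings{}
//    m3u8.SetVideoPid(481).SetAudioPids([]*int64{aws.Int64(482), aws.Int64(483)})
//    m3u8.SetPcrPid(16) // below the minimum of 32
//
//    if err := m3u8.Validate(); err != nil {
//        fmt.Println(err) // reports a validation error for PcrPid
//    }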
14428type M3u8Settings struct {
14429	_ struct{} `type:"structure"`
14430
14431	// Specify this setting only when your output will be consumed by a downstream
14432	// repackaging workflow that is sensitive to very small duration differences
14433	// between video and audio. For this situation, choose Match video duration
14434	// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
14435	// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
14436	// MediaConvert pads the output audio streams with silence or trims them to
14437	// ensure that the total duration of each audio stream is at least as long as
14438	// the total duration of the video stream. After padding or trimming, the audio
14439	// stream duration is no more than one frame longer than the video stream. MediaConvert
14440	// applies audio padding or trimming only to the end of the last segment of
14441	// the output. For unsegmented outputs, MediaConvert adds padding only to the
14442	// end of the file. When you keep the default value, any minor discrepancies
14443	// between audio and video duration will depend on your output audio codec.
14444	AudioDuration *string `locationName:"audioDuration" type:"string" enum:"M3u8AudioDuration"`
14445
14446	// The number of audio frames to insert for each PES packet.
14447	AudioFramesPerPes *int64 `locationName:"audioFramesPerPes" type:"integer"`
14448
14449	// Packet Identifier (PID) of the elementary audio stream(s) in the transport
14450	// stream. Multiple values are accepted, and can be entered in ranges and/or
14451	// by comma separation.
14452	AudioPids []*int64 `locationName:"audioPids" type:"list"`
14453
14454	// If INSERT, Nielsen inaudible tones for media tracking will be detected in
14455	// the input audio and an equivalent ID3 tag will be inserted in the output.
14456	NielsenId3 *string `locationName:"nielsenId3" type:"string" enum:"M3u8NielsenId3"`
14457
14458	// The number of milliseconds between instances of this table in the output
14459	// transport stream.
14460	PatInterval *int64 `locationName:"patInterval" type:"integer"`
14461
	// When set to PCR_EVERY_PES_PACKET, a Program Clock Reference value is inserted
14463	// for every Packetized Elementary Stream (PES) header. This parameter is effective
14464	// only when the PCR PID is the same as the video or audio elementary stream.
14465	PcrControl *string `locationName:"pcrControl" type:"string" enum:"M3u8PcrControl"`
14466
14467	// Packet Identifier (PID) of the Program Clock Reference (PCR) in the transport
14468	// stream. When no value is given, the encoder will assign the same value as
14469	// the Video PID.
14470	PcrPid *int64 `locationName:"pcrPid" min:"32" type:"integer"`
14471
14472	// The number of milliseconds between instances of this table in the output
14473	// transport stream.
14474	PmtInterval *int64 `locationName:"pmtInterval" type:"integer"`
14475
14476	// Packet Identifier (PID) for the Program Map Table (PMT) in the transport
14477	// stream.
14478	PmtPid *int64 `locationName:"pmtPid" min:"32" type:"integer"`
14479
14480	// Packet Identifier (PID) of the private metadata stream in the transport stream.
14481	PrivateMetadataPid *int64 `locationName:"privateMetadataPid" min:"32" type:"integer"`
14482
14483	// The value of the program number field in the Program Map Table.
14484	ProgramNumber *int64 `locationName:"programNumber" type:"integer"`
14485
14486	// Packet Identifier (PID) of the SCTE-35 stream in the transport stream.
14487	Scte35Pid *int64 `locationName:"scte35Pid" min:"32" type:"integer"`
14488
14489	// For SCTE-35 markers from your input-- Choose Passthrough (PASSTHROUGH) if
14490	// you want SCTE-35 markers that appear in your input to also appear in this
14491	// output. Choose None (NONE) if you don't want SCTE-35 markers in this output.
14492	// For SCTE-35 markers from an ESAM XML document-- Choose None (NONE) if you
14493	// don't want manifest conditioning. Choose Passthrough (PASSTHROUGH) and choose
14494	// Ad markers (adMarkers) if you do want manifest conditioning. In both cases,
14495	// also provide the ESAM XML as a string in the setting Signal processing notification
14496	// XML (sccXml).
14497	Scte35Source *string `locationName:"scte35Source" type:"string" enum:"M3u8Scte35Source"`
14498
14499	// Applies only to HLS outputs. Use this setting to specify whether the service
14500	// inserts the ID3 timed metadata from the input in this output.
14501	TimedMetadata *string `locationName:"timedMetadata" type:"string" enum:"TimedMetadata"`
14502
14503	// Packet Identifier (PID) of the timed metadata stream in the transport stream.
14504	TimedMetadataPid *int64 `locationName:"timedMetadataPid" min:"32" type:"integer"`
14505
14506	// The value of the transport stream ID field in the Program Map Table.
14507	TransportStreamId *int64 `locationName:"transportStreamId" type:"integer"`
14508
14509	// Packet Identifier (PID) of the elementary video stream in the transport stream.
14510	VideoPid *int64 `locationName:"videoPid" min:"32" type:"integer"`
14511}
14512
14513// String returns the string representation
14514func (s M3u8Settings) String() string {
14515	return awsutil.Prettify(s)
14516}
14517
14518// GoString returns the string representation
14519func (s M3u8Settings) GoString() string {
14520	return s.String()
14521}
14522
14523// Validate inspects the fields of the type to determine if they are valid.
14524func (s *M3u8Settings) Validate() error {
14525	invalidParams := request.ErrInvalidParams{Context: "M3u8Settings"}
14526	if s.PcrPid != nil && *s.PcrPid < 32 {
14527		invalidParams.Add(request.NewErrParamMinValue("PcrPid", 32))
14528	}
14529	if s.PmtPid != nil && *s.PmtPid < 32 {
14530		invalidParams.Add(request.NewErrParamMinValue("PmtPid", 32))
14531	}
14532	if s.PrivateMetadataPid != nil && *s.PrivateMetadataPid < 32 {
14533		invalidParams.Add(request.NewErrParamMinValue("PrivateMetadataPid", 32))
14534	}
14535	if s.Scte35Pid != nil && *s.Scte35Pid < 32 {
14536		invalidParams.Add(request.NewErrParamMinValue("Scte35Pid", 32))
14537	}
14538	if s.TimedMetadataPid != nil && *s.TimedMetadataPid < 32 {
14539		invalidParams.Add(request.NewErrParamMinValue("TimedMetadataPid", 32))
14540	}
14541	if s.VideoPid != nil && *s.VideoPid < 32 {
14542		invalidParams.Add(request.NewErrParamMinValue("VideoPid", 32))
14543	}
14544
14545	if invalidParams.Len() > 0 {
14546		return invalidParams
14547	}
14548	return nil
14549}
14550
14551// SetAudioDuration sets the AudioDuration field's value.
14552func (s *M3u8Settings) SetAudioDuration(v string) *M3u8Settings {
14553	s.AudioDuration = &v
14554	return s
14555}
14556
14557// SetAudioFramesPerPes sets the AudioFramesPerPes field's value.
14558func (s *M3u8Settings) SetAudioFramesPerPes(v int64) *M3u8Settings {
14559	s.AudioFramesPerPes = &v
14560	return s
14561}
14562
14563// SetAudioPids sets the AudioPids field's value.
14564func (s *M3u8Settings) SetAudioPids(v []*int64) *M3u8Settings {
14565	s.AudioPids = v
14566	return s
14567}
14568
14569// SetNielsenId3 sets the NielsenId3 field's value.
14570func (s *M3u8Settings) SetNielsenId3(v string) *M3u8Settings {
14571	s.NielsenId3 = &v
14572	return s
14573}
14574
14575// SetPatInterval sets the PatInterval field's value.
14576func (s *M3u8Settings) SetPatInterval(v int64) *M3u8Settings {
14577	s.PatInterval = &v
14578	return s
14579}
14580
14581// SetPcrControl sets the PcrControl field's value.
14582func (s *M3u8Settings) SetPcrControl(v string) *M3u8Settings {
14583	s.PcrControl = &v
14584	return s
14585}
14586
14587// SetPcrPid sets the PcrPid field's value.
14588func (s *M3u8Settings) SetPcrPid(v int64) *M3u8Settings {
14589	s.PcrPid = &v
14590	return s
14591}
14592
14593// SetPmtInterval sets the PmtInterval field's value.
14594func (s *M3u8Settings) SetPmtInterval(v int64) *M3u8Settings {
14595	s.PmtInterval = &v
14596	return s
14597}
14598
14599// SetPmtPid sets the PmtPid field's value.
14600func (s *M3u8Settings) SetPmtPid(v int64) *M3u8Settings {
14601	s.PmtPid = &v
14602	return s
14603}
14604
14605// SetPrivateMetadataPid sets the PrivateMetadataPid field's value.
14606func (s *M3u8Settings) SetPrivateMetadataPid(v int64) *M3u8Settings {
14607	s.PrivateMetadataPid = &v
14608	return s
14609}
14610
14611// SetProgramNumber sets the ProgramNumber field's value.
14612func (s *M3u8Settings) SetProgramNumber(v int64) *M3u8Settings {
14613	s.ProgramNumber = &v
14614	return s
14615}
14616
14617// SetScte35Pid sets the Scte35Pid field's value.
14618func (s *M3u8Settings) SetScte35Pid(v int64) *M3u8Settings {
14619	s.Scte35Pid = &v
14620	return s
14621}
14622
14623// SetScte35Source sets the Scte35Source field's value.
14624func (s *M3u8Settings) SetScte35Source(v string) *M3u8Settings {
14625	s.Scte35Source = &v
14626	return s
14627}
14628
14629// SetTimedMetadata sets the TimedMetadata field's value.
14630func (s *M3u8Settings) SetTimedMetadata(v string) *M3u8Settings {
14631	s.TimedMetadata = &v
14632	return s
14633}
14634
14635// SetTimedMetadataPid sets the TimedMetadataPid field's value.
14636func (s *M3u8Settings) SetTimedMetadataPid(v int64) *M3u8Settings {
14637	s.TimedMetadataPid = &v
14638	return s
14639}
14640
14641// SetTransportStreamId sets the TransportStreamId field's value.
14642func (s *M3u8Settings) SetTransportStreamId(v int64) *M3u8Settings {
14643	s.TransportStreamId = &v
14644	return s
14645}
14646
14647// SetVideoPid sets the VideoPid field's value.
14648func (s *M3u8Settings) SetVideoPid(v int64) *M3u8Settings {
14649	s.VideoPid = &v
14650	return s
14651}
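
// exampleM3u8Settings is a hypothetical sketch (not part of the generated API)
// showing how the chained setters and Validate on M3u8Settings fit together.
// The PID and transport stream ID values below are illustrative only; per the
// field documentation above, every PID must be at least 32 to pass validation.
func exampleM3u8Settings() *M3u8Settings {
	s := (&M3u8Settings{}).
		SetPmtPid(480).
		SetVideoPid(481).
		SetAudioPids([]*int64{aws.Int64(482)}).
		SetPcrPid(481).
		SetTransportStreamId(1)
	if err := s.Validate(); err != nil {
		// Validation failures are returned as request.ErrInvalidParams.
		fmt.Println(err)
	}
	return s
}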
14652
14653// Overlay motion graphics on top of your video at the time that you specify.
14654type MotionImageInserter struct {
14655	_ struct{} `type:"structure"`
14656
14657	// If your motion graphic asset is a .mov file, keep this setting unspecified.
14658	// If your motion graphic asset is a series of .png files, specify the frame
14659	// rate of the overlay in frames per second, as a fraction. For example, specify
14660	// 24 fps as 24/1. Make sure that the number of images in your series matches
14661	// the frame rate and your intended overlay duration. For example, if you want
14662	// a 30-second overlay at 30 fps, you should have 900 .png images. This overlay
14663	// frame rate doesn't need to match the frame rate of the underlying video.
14664	Framerate *MotionImageInsertionFramerate `locationName:"framerate" type:"structure"`
14665
14666	// Specify the .mov file or series of .png files that you want to overlay on
14667	// your video. For .png files, provide the file name of the first file in the
14668	// series. Make sure that the names of the .png files end with sequential numbers
14669	// that specify the order that they are played in. For example, overlay_000.png,
14670	// overlay_001.png, overlay_002.png, and so on. The sequence must start at zero,
14671	// and each image file name must have the same number of digits. Pad your initial
14672	// file names with enough zeros to complete the sequence. For example, if the
14673	// first image is overlay_0.png, there can be only 10 images in the sequence,
14674	// with the last image being overlay_9.png. But if the first image is overlay_00.png,
14675	// there can be 100 images in the sequence.
14676	Input *string `locationName:"input" min:"14" type:"string"`
14677
14678	// Choose the type of motion graphic asset that you are providing for your overlay.
14679	// You can choose either a .mov file or a series of .png files.
14680	InsertionMode *string `locationName:"insertionMode" type:"string" enum:"MotionImageInsertionMode"`
14681
14682	// Use Offset to specify the placement of your motion graphic overlay on the
14683	// video frame. Specify in pixels, from the upper-left corner of the frame.
14684	// If you don't specify an offset, the service scales your overlay to the full
14685	// size of the frame. Otherwise, the service inserts the overlay at its native
14686	// resolution and scales the size up or down with any video scaling.
14687	Offset *MotionImageInsertionOffset `locationName:"offset" type:"structure"`
14688
14689	// Specify whether your motion graphic overlay repeats on a loop or plays only
14690	// once.
14691	Playback *string `locationName:"playback" type:"string" enum:"MotionImagePlayback"`
14692
14693	// Specify when the motion overlay begins. Use timecode format (HH:MM:SS:FF
14694	// or HH:MM:SS;FF). Make sure that the timecode you provide here takes into
14695	// account how you have set up your timecode configuration under both job settings
14696	// and input settings. The simplest way to do that is to set both to start at
14697	// 0. If you need to set up your job to follow timecodes embedded in your source
14698	// that don't start at zero, make sure that you specify a start time that is
14699	// after the first embedded timecode. For more information, see https://docs.aws.amazon.com/mediaconvert/latest/ug/setting-up-timecode.html
14700	// Find job-wide and input timecode configuration settings in your JSON job
14701	// settings specification at settings>timecodeConfig>source and settings>inputs>timecodeSource.
14702	StartTime *string `locationName:"startTime" min:"11" type:"string"`
14703}
14704
14705// String returns the string representation
14706func (s MotionImageInserter) String() string {
14707	return awsutil.Prettify(s)
14708}
14709
14710// GoString returns the string representation
14711func (s MotionImageInserter) GoString() string {
14712	return s.String()
14713}
14714
14715// Validate inspects the fields of the type to determine if they are valid.
14716func (s *MotionImageInserter) Validate() error {
14717	invalidParams := request.ErrInvalidParams{Context: "MotionImageInserter"}
14718	if s.Input != nil && len(*s.Input) < 14 {
14719		invalidParams.Add(request.NewErrParamMinLen("Input", 14))
14720	}
14721	if s.StartTime != nil && len(*s.StartTime) < 11 {
14722		invalidParams.Add(request.NewErrParamMinLen("StartTime", 11))
14723	}
14724	if s.Framerate != nil {
14725		if err := s.Framerate.Validate(); err != nil {
14726			invalidParams.AddNested("Framerate", err.(request.ErrInvalidParams))
14727		}
14728	}
14729
14730	if invalidParams.Len() > 0 {
14731		return invalidParams
14732	}
14733	return nil
14734}
14735
14736// SetFramerate sets the Framerate field's value.
14737func (s *MotionImageInserter) SetFramerate(v *MotionImageInsertionFramerate) *MotionImageInserter {
14738	s.Framerate = v
14739	return s
14740}
14741
14742// SetInput sets the Input field's value.
14743func (s *MotionImageInserter) SetInput(v string) *MotionImageInserter {
14744	s.Input = &v
14745	return s
14746}
14747
14748// SetInsertionMode sets the InsertionMode field's value.
14749func (s *MotionImageInserter) SetInsertionMode(v string) *MotionImageInserter {
14750	s.InsertionMode = &v
14751	return s
14752}
14753
14754// SetOffset sets the Offset field's value.
14755func (s *MotionImageInserter) SetOffset(v *MotionImageInsertionOffset) *MotionImageInserter {
14756	s.Offset = v
14757	return s
14758}
14759
14760// SetPlayback sets the Playback field's value.
14761func (s *MotionImageInserter) SetPlayback(v string) *MotionImageInserter {
14762	s.Playback = &v
14763	return s
14764}
14765
14766// SetStartTime sets the StartTime field's value.
14767func (s *MotionImageInserter) SetStartTime(v string) *MotionImageInserter {
14768	s.StartTime = &v
14769	return s
14770}
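
// exampleMotionImageInserter is a hypothetical sketch (not part of the generated
// API) of a .png-series overlay, following the field documentation above: the
// input names the first image in a zero-padded sequence, the overlay frame rate
// is given as a fraction (24/1 for 24 fps), the offset is in pixels from the
// upper-left corner, and the start time uses HH:MM:SS:FF timecode format. The
// bucket, file names, and numbers are illustrative only.
func exampleMotionImageInserter() *MotionImageInserter {
	m := (&MotionImageInserter{}).
		SetInput("s3://DOC-EXAMPLE-BUCKET/overlays/overlay_000.png").
		SetFramerate((&MotionImageInsertionFramerate{}).
			SetFramerateNumerator(24).
			SetFramerateDenominator(1)).
		SetOffset((&MotionImageInsertionOffset{}).
			SetImageX(100).
			SetImageY(50)).
		SetStartTime("00:00:07:00")
	if err := m.Validate(); err != nil {
		fmt.Println(err)
	}
	return m
}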
14771
14772// For motion overlays that don't have a built-in frame rate, specify the frame
14773// rate of the overlay in frames per second, as a fraction. For example, specify
14774// 24 fps as 24/1. The overlay frame rate doesn't need to match the frame rate
14775// of the underlying video.
14776type MotionImageInsertionFramerate struct {
14777	_ struct{} `type:"structure"`
14778
14779	// The bottom of the fraction that expresses your overlay frame rate. For example,
14780	// if your frame rate is 24 fps, set this value to 1.
14781	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
14782
14783	// The top of the fraction that expresses your overlay frame rate. For example,
14784	// if your frame rate is 24 fps, set this value to 24.
14785	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
14786}
14787
14788// String returns the string representation
14789func (s MotionImageInsertionFramerate) String() string {
14790	return awsutil.Prettify(s)
14791}
14792
14793// GoString returns the string representation
14794func (s MotionImageInsertionFramerate) GoString() string {
14795	return s.String()
14796}
14797
14798// Validate inspects the fields of the type to determine if they are valid.
14799func (s *MotionImageInsertionFramerate) Validate() error {
14800	invalidParams := request.ErrInvalidParams{Context: "MotionImageInsertionFramerate"}
14801	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
14802		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
14803	}
14804	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
14805		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
14806	}
14807
14808	if invalidParams.Len() > 0 {
14809		return invalidParams
14810	}
14811	return nil
14812}
14813
14814// SetFramerateDenominator sets the FramerateDenominator field's value.
14815func (s *MotionImageInsertionFramerate) SetFramerateDenominator(v int64) *MotionImageInsertionFramerate {
14816	s.FramerateDenominator = &v
14817	return s
14818}
14819
14820// SetFramerateNumerator sets the FramerateNumerator field's value.
14821func (s *MotionImageInsertionFramerate) SetFramerateNumerator(v int64) *MotionImageInsertionFramerate {
14822	s.FramerateNumerator = &v
14823	return s
14824}
14825
// Specify the offset between the upper-left corner of the video frame and the
// upper-left corner of the overlay.
14828type MotionImageInsertionOffset struct {
14829	_ struct{} `type:"structure"`
14830
14831	// Set the distance, in pixels, between the overlay and the left edge of the
14832	// video frame.
14833	ImageX *int64 `locationName:"imageX" type:"integer"`
14834
14835	// Set the distance, in pixels, between the overlay and the top edge of the
14836	// video frame.
14837	ImageY *int64 `locationName:"imageY" type:"integer"`
14838}
14839
14840// String returns the string representation
14841func (s MotionImageInsertionOffset) String() string {
14842	return awsutil.Prettify(s)
14843}
14844
14845// GoString returns the string representation
14846func (s MotionImageInsertionOffset) GoString() string {
14847	return s.String()
14848}
14849
14850// SetImageX sets the ImageX field's value.
14851func (s *MotionImageInsertionOffset) SetImageX(v int64) *MotionImageInsertionOffset {
14852	s.ImageX = &v
14853	return s
14854}
14855
14856// SetImageY sets the ImageY field's value.
14857func (s *MotionImageInsertionOffset) SetImageY(v int64) *MotionImageInsertionOffset {
14858	s.ImageY = &v
14859	return s
14860}
14861
14862// Settings for MOV Container.
14863type MovSettings struct {
14864	_ struct{} `type:"structure"`
14865
14866	// When enabled, include 'clap' atom if appropriate for the video output settings.
14867	ClapAtom *string `locationName:"clapAtom" type:"string" enum:"MovClapAtom"`
14868
14869	// When enabled, file composition times will start at zero, composition times
14870	// in the 'ctts' (composition time to sample) box for B-frames will be negative,
14871	// and a 'cslg' (composition shift least greatest) box will be included per
14872	// 14496-1 amendment 1. This improves compatibility with Apple players and tools.
14873	CslgAtom *string `locationName:"cslgAtom" type:"string" enum:"MovCslgAtom"`
14874
14875	// When set to XDCAM, writes MPEG2 video streams into the QuickTime file using
14876	// XDCAM fourcc codes. This increases compatibility with Apple editors and players,
14877	// but may decrease compatibility with other players. Only applicable when the
14878	// video codec is MPEG2.
14879	Mpeg2FourCCControl *string `locationName:"mpeg2FourCCControl" type:"string" enum:"MovMpeg2FourCCControl"`
14880
	// To make this output compatible with Omneon, keep the default value, OMNEON.
	// Unless you need Omneon compatibility, set this value to NONE. When you keep
	// the default value, OMNEON, MediaConvert increases the length of the edit
	// list atom. This might cause file rejections when a recipient of the output
	// file doesn't expect this extra padding.
14886	PaddingControl *string `locationName:"paddingControl" type:"string" enum:"MovPaddingControl"`
14887
14888	// Always keep the default value (SELF_CONTAINED) for this setting.
14889	Reference *string `locationName:"reference" type:"string" enum:"MovReference"`
14890}
14891
14892// String returns the string representation
14893func (s MovSettings) String() string {
14894	return awsutil.Prettify(s)
14895}
14896
14897// GoString returns the string representation
14898func (s MovSettings) GoString() string {
14899	return s.String()
14900}
14901
14902// SetClapAtom sets the ClapAtom field's value.
14903func (s *MovSettings) SetClapAtom(v string) *MovSettings {
14904	s.ClapAtom = &v
14905	return s
14906}
14907
14908// SetCslgAtom sets the CslgAtom field's value.
14909func (s *MovSettings) SetCslgAtom(v string) *MovSettings {
14910	s.CslgAtom = &v
14911	return s
14912}
14913
14914// SetMpeg2FourCCControl sets the Mpeg2FourCCControl field's value.
14915func (s *MovSettings) SetMpeg2FourCCControl(v string) *MovSettings {
14916	s.Mpeg2FourCCControl = &v
14917	return s
14918}
14919
14920// SetPaddingControl sets the PaddingControl field's value.
14921func (s *MovSettings) SetPaddingControl(v string) *MovSettings {
14922	s.PaddingControl = &v
14923	return s
14924}
14925
14926// SetReference sets the Reference field's value.
14927func (s *MovSettings) SetReference(v string) *MovSettings {
14928	s.Reference = &v
14929	return s
14930}
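
// exampleMovSettings is a hypothetical sketch (not part of the generated API)
// showing MovSettings built with its setters. The values come from the field
// documentation above: NONE skips the Omneon edit-list padding, SELF_CONTAINED
// is the recommended Reference value, and XDCAM fourcc codes apply only when
// the video codec is MPEG2. Whether this combination suits a given workflow is
// up to the caller.
func exampleMovSettings() *MovSettings {
	return (&MovSettings{}).
		SetPaddingControl("NONE").
		SetReference("SELF_CONTAINED").
		SetMpeg2FourCCControl("XDCAM")
}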
14931
14932// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
14933// the value MP2.
14934type Mp2Settings struct {
14935	_ struct{} `type:"structure"`
14936
14937	// Specify the average bitrate in bits per second.
14938	Bitrate *int64 `locationName:"bitrate" min:"32000" type:"integer"`
14939
14940	// Set Channels to specify the number of channels in this output audio track.
14941	// Choosing Mono in the console will give you 1 output channel; choosing Stereo
14942	// will give you 2. In the API, valid values are 1 and 2.
14943	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
14944
	// Sample rate in Hz.
14946	SampleRate *int64 `locationName:"sampleRate" min:"32000" type:"integer"`
14947}
14948
14949// String returns the string representation
14950func (s Mp2Settings) String() string {
14951	return awsutil.Prettify(s)
14952}
14953
14954// GoString returns the string representation
14955func (s Mp2Settings) GoString() string {
14956	return s.String()
14957}
14958
14959// Validate inspects the fields of the type to determine if they are valid.
14960func (s *Mp2Settings) Validate() error {
14961	invalidParams := request.ErrInvalidParams{Context: "Mp2Settings"}
14962	if s.Bitrate != nil && *s.Bitrate < 32000 {
14963		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 32000))
14964	}
14965	if s.Channels != nil && *s.Channels < 1 {
14966		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
14967	}
14968	if s.SampleRate != nil && *s.SampleRate < 32000 {
14969		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 32000))
14970	}
14971
14972	if invalidParams.Len() > 0 {
14973		return invalidParams
14974	}
14975	return nil
14976}
14977
14978// SetBitrate sets the Bitrate field's value.
14979func (s *Mp2Settings) SetBitrate(v int64) *Mp2Settings {
14980	s.Bitrate = &v
14981	return s
14982}
14983
14984// SetChannels sets the Channels field's value.
14985func (s *Mp2Settings) SetChannels(v int64) *Mp2Settings {
14986	s.Channels = &v
14987	return s
14988}
14989
14990// SetSampleRate sets the SampleRate field's value.
14991func (s *Mp2Settings) SetSampleRate(v int64) *Mp2Settings {
14992	s.SampleRate = &v
14993	return s
14994}
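
// exampleMp2Settings is a hypothetical sketch (not part of the generated API)
// of an MP2 audio configuration. The bitrate and sample rate are illustrative;
// per Validate above, Bitrate and SampleRate must be at least 32000 and
// Channels at least 1.
func exampleMp2Settings() *Mp2Settings {
	s := (&Mp2Settings{}).
		SetBitrate(192000).
		SetChannels(2).
		SetSampleRate(48000)
	if err := s.Validate(); err != nil {
		fmt.Println(err)
	}
	return s
}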
14995
14996// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
14997// value MP3.
14998type Mp3Settings struct {
14999	_ struct{} `type:"structure"`
15000
15001	// Specify the average bitrate in bits per second.
15002	Bitrate *int64 `locationName:"bitrate" min:"16000" type:"integer"`
15003
15004	// Specify the number of channels in this output audio track. Choosing Mono
15005	// on the console gives you 1 output channel; choosing Stereo gives you 2. In
15006	// the API, valid values are 1 and 2.
15007	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
15008
15009	// Specify whether the service encodes this MP3 audio output with a constant
15010	// bitrate (CBR) or a variable bitrate (VBR).
15011	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"Mp3RateControlMode"`
15012
	// Sample rate in Hz.
15014	SampleRate *int64 `locationName:"sampleRate" min:"22050" type:"integer"`
15015
15016	// Required when you set Bitrate control mode (rateControlMode) to VBR. Specify
15017	// the audio quality of this MP3 output from 0 (highest quality) to 9 (lowest
15018	// quality).
15019	VbrQuality *int64 `locationName:"vbrQuality" type:"integer"`
15020}
15021
15022// String returns the string representation
15023func (s Mp3Settings) String() string {
15024	return awsutil.Prettify(s)
15025}
15026
15027// GoString returns the string representation
15028func (s Mp3Settings) GoString() string {
15029	return s.String()
15030}
15031
15032// Validate inspects the fields of the type to determine if they are valid.
15033func (s *Mp3Settings) Validate() error {
15034	invalidParams := request.ErrInvalidParams{Context: "Mp3Settings"}
15035	if s.Bitrate != nil && *s.Bitrate < 16000 {
15036		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 16000))
15037	}
15038	if s.Channels != nil && *s.Channels < 1 {
15039		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
15040	}
15041	if s.SampleRate != nil && *s.SampleRate < 22050 {
15042		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 22050))
15043	}
15044
15045	if invalidParams.Len() > 0 {
15046		return invalidParams
15047	}
15048	return nil
15049}
15050
15051// SetBitrate sets the Bitrate field's value.
15052func (s *Mp3Settings) SetBitrate(v int64) *Mp3Settings {
15053	s.Bitrate = &v
15054	return s
15055}
15056
15057// SetChannels sets the Channels field's value.
15058func (s *Mp3Settings) SetChannels(v int64) *Mp3Settings {
15059	s.Channels = &v
15060	return s
15061}
15062
15063// SetRateControlMode sets the RateControlMode field's value.
15064func (s *Mp3Settings) SetRateControlMode(v string) *Mp3Settings {
15065	s.RateControlMode = &v
15066	return s
15067}
15068
15069// SetSampleRate sets the SampleRate field's value.
15070func (s *Mp3Settings) SetSampleRate(v int64) *Mp3Settings {
15071	s.SampleRate = &v
15072	return s
15073}
15074
15075// SetVbrQuality sets the VbrQuality field's value.
15076func (s *Mp3Settings) SetVbrQuality(v int64) *Mp3Settings {
15077	s.VbrQuality = &v
15078	return s
15079}
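
// exampleMp3Settings is a hypothetical sketch (not part of the generated API)
// of a variable-bitrate MP3 output. Per the field documentation above, VbrQuality
// applies only when the rate control mode is VBR and ranges from 0 (highest
// quality) to 9 (lowest quality); the sample rate must be at least 22050.
func exampleMp3Settings() *Mp3Settings {
	s := (&Mp3Settings{}).
		SetRateControlMode("VBR").
		SetVbrQuality(2).
		SetChannels(2).
		SetSampleRate(44100)
	if err := s.Validate(); err != nil {
		fmt.Println(err)
	}
	return s
}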
15080
15081// Settings for MP4 container. You can create audio-only AAC outputs with this
15082// container.
15083type Mp4Settings struct {
15084	_ struct{} `type:"structure"`
15085
15086	// Specify this setting only when your output will be consumed by a downstream
15087	// repackaging workflow that is sensitive to very small duration differences
15088	// between video and audio. For this situation, choose Match video duration
15089	// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
15090	// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
15091	// MediaConvert pads the output audio streams with silence or trims them to
15092	// ensure that the total duration of each audio stream is at least as long as
15093	// the total duration of the video stream. After padding or trimming, the audio
15094	// stream duration is no more than one frame longer than the video stream. MediaConvert
15095	// applies audio padding or trimming only to the end of the last segment of
15096	// the output. For unsegmented outputs, MediaConvert adds padding only to the
15097	// end of the file. When you keep the default value, any minor discrepancies
15098	// between audio and video duration will depend on your output audio codec.
15099	AudioDuration *string `locationName:"audioDuration" type:"string" enum:"CmfcAudioDuration"`
15100
15101	// When enabled, file composition times will start at zero, composition times
15102	// in the 'ctts' (composition time to sample) box for B-frames will be negative,
15103	// and a 'cslg' (composition shift least greatest) box will be included per
15104	// 14496-1 amendment 1. This improves compatibility with Apple players and tools.
15105	CslgAtom *string `locationName:"cslgAtom" type:"string" enum:"Mp4CslgAtom"`
15106
15107	// Ignore this setting unless compliance to the CTTS box version specification
15108	// matters in your workflow. Specify a value of 1 to set your CTTS box version
15109	// to 1 and make your output compliant with the specification. When you specify
15110	// a value of 1, you must also set CSLG atom (cslgAtom) to the value INCLUDE.
15111	// Keep the default value 0 to set your CTTS box version to 0. This can provide
15112	// backward compatibility for some players and packagers.
15113	CttsVersion *int64 `locationName:"cttsVersion" type:"integer"`
15114
15115	// Inserts a free-space box immediately after the moov box.
15116	FreeSpaceBox *string `locationName:"freeSpaceBox" type:"string" enum:"Mp4FreeSpaceBox"`
15117
15118	// If set to PROGRESSIVE_DOWNLOAD, the MOOV atom is relocated to the beginning
15119	// of the archive as required for progressive downloading. Otherwise it is placed
15120	// normally at the end.
15121	MoovPlacement *string `locationName:"moovPlacement" type:"string" enum:"Mp4MoovPlacement"`
15122
15123	// Overrides the "Major Brand" field in the output file. Usually not necessary
15124	// to specify.
15125	Mp4MajorBrand *string `locationName:"mp4MajorBrand" type:"string"`
15126}
15127
15128// String returns the string representation
15129func (s Mp4Settings) String() string {
15130	return awsutil.Prettify(s)
15131}
15132
15133// GoString returns the string representation
15134func (s Mp4Settings) GoString() string {
15135	return s.String()
15136}
15137
15138// SetAudioDuration sets the AudioDuration field's value.
15139func (s *Mp4Settings) SetAudioDuration(v string) *Mp4Settings {
15140	s.AudioDuration = &v
15141	return s
15142}
15143
15144// SetCslgAtom sets the CslgAtom field's value.
15145func (s *Mp4Settings) SetCslgAtom(v string) *Mp4Settings {
15146	s.CslgAtom = &v
15147	return s
15148}
15149
15150// SetCttsVersion sets the CttsVersion field's value.
15151func (s *Mp4Settings) SetCttsVersion(v int64) *Mp4Settings {
15152	s.CttsVersion = &v
15153	return s
15154}
15155
15156// SetFreeSpaceBox sets the FreeSpaceBox field's value.
15157func (s *Mp4Settings) SetFreeSpaceBox(v string) *Mp4Settings {
15158	s.FreeSpaceBox = &v
15159	return s
15160}
15161
15162// SetMoovPlacement sets the MoovPlacement field's value.
15163func (s *Mp4Settings) SetMoovPlacement(v string) *Mp4Settings {
15164	s.MoovPlacement = &v
15165	return s
15166}
15167
15168// SetMp4MajorBrand sets the Mp4MajorBrand field's value.
15169func (s *Mp4Settings) SetMp4MajorBrand(v string) *Mp4Settings {
15170	s.Mp4MajorBrand = &v
15171	return s
15172}
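
// exampleMp4Settings is a hypothetical sketch (not part of the generated API)
// of an MP4 container set up for progressive download. Per the field
// documentation above, PROGRESSIVE_DOWNLOAD moves the MOOV atom to the start
// of the file, and setting CTTS box version 1 requires CSLG atom INCLUDE.
func exampleMp4Settings() *Mp4Settings {
	return (&Mp4Settings{}).
		SetMoovPlacement("PROGRESSIVE_DOWNLOAD").
		SetCslgAtom("INCLUDE").
		SetCttsVersion(1)
}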
15173
15174// Settings for MP4 segments in DASH
15175type MpdSettings struct {
15176	_ struct{} `type:"structure"`
15177
15178	// Optional. Choose Include (INCLUDE) to have MediaConvert mark up your DASH
15179	// manifest with elements for embedded 608 captions. This markup isn't generally
15180	// required, but some video players require it to discover and play embedded
15181	// 608 captions. Keep the default value, Exclude (EXCLUDE), to leave these elements
15182	// out. When you enable this setting, this is the markup that MediaConvert includes
15183	// in your manifest:
15184	AccessibilityCaptionHints *string `locationName:"accessibilityCaptionHints" type:"string" enum:"MpdAccessibilityCaptionHints"`
15185
15186	// Specify this setting only when your output will be consumed by a downstream
15187	// repackaging workflow that is sensitive to very small duration differences
15188	// between video and audio. For this situation, choose Match video duration
15189	// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
15190	// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
15191	// MediaConvert pads the output audio streams with silence or trims them to
15192	// ensure that the total duration of each audio stream is at least as long as
15193	// the total duration of the video stream. After padding or trimming, the audio
15194	// stream duration is no more than one frame longer than the video stream. MediaConvert
15195	// applies audio padding or trimming only to the end of the last segment of
15196	// the output. For unsegmented outputs, MediaConvert adds padding only to the
15197	// end of the file. When you keep the default value, any minor discrepancies
15198	// between audio and video duration will depend on your output audio codec.
15199	AudioDuration *string `locationName:"audioDuration" type:"string" enum:"MpdAudioDuration"`
15200
15201	// Use this setting only in DASH output groups that include sidecar TTML or
15202	// IMSC captions. You specify sidecar captions in a separate output from your
15203	// audio and video. Choose Raw (RAW) for captions in a single XML file in a
15204	// raw container. Choose Fragmented MPEG-4 (FRAGMENTED_MP4) for captions in
15205	// XML format contained within fragmented MP4 files. This set of fragmented
15206	// MP4 files is separate from your video and audio fragmented MP4 files.
15207	CaptionContainerType *string `locationName:"captionContainerType" type:"string" enum:"MpdCaptionContainerType"`
15208
15209	// Use this setting only when you specify SCTE-35 markers from ESAM. Choose
15210	// INSERT to put SCTE-35 markers in this output at the insertion points that
15211	// you specify in an ESAM XML document. Provide the document in the setting
15212	// SCC XML (sccXml).
15213	Scte35Esam *string `locationName:"scte35Esam" type:"string" enum:"MpdScte35Esam"`
15214
15215	// Ignore this setting unless you have SCTE-35 markers in your input video file.
15216	// Choose Passthrough (PASSTHROUGH) if you want SCTE-35 markers that appear
15217	// in your input to also appear in this output. Choose None (NONE) if you don't
15218	// want those SCTE-35 markers in this output.
15219	Scte35Source *string `locationName:"scte35Source" type:"string" enum:"MpdScte35Source"`
15220}
15221
15222// String returns the string representation
15223func (s MpdSettings) String() string {
15224	return awsutil.Prettify(s)
15225}
15226
15227// GoString returns the string representation
15228func (s MpdSettings) GoString() string {
15229	return s.String()
15230}
15231
15232// SetAccessibilityCaptionHints sets the AccessibilityCaptionHints field's value.
15233func (s *MpdSettings) SetAccessibilityCaptionHints(v string) *MpdSettings {
15234	s.AccessibilityCaptionHints = &v
15235	return s
15236}
15237
15238// SetAudioDuration sets the AudioDuration field's value.
15239func (s *MpdSettings) SetAudioDuration(v string) *MpdSettings {
15240	s.AudioDuration = &v
15241	return s
15242}
15243
15244// SetCaptionContainerType sets the CaptionContainerType field's value.
15245func (s *MpdSettings) SetCaptionContainerType(v string) *MpdSettings {
15246	s.CaptionContainerType = &v
15247	return s
15248}
15249
15250// SetScte35Esam sets the Scte35Esam field's value.
15251func (s *MpdSettings) SetScte35Esam(v string) *MpdSettings {
15252	s.Scte35Esam = &v
15253	return s
15254}
15255
15256// SetScte35Source sets the Scte35Source field's value.
15257func (s *MpdSettings) SetScte35Source(v string) *MpdSettings {
15258	s.Scte35Source = &v
15259	return s
15260}
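
// exampleMpdSettings is a hypothetical sketch (not part of the generated API)
// for a DASH output group with sidecar captions. Per the field documentation
// above, FRAGMENTED_MP4 wraps TTML/IMSC captions in fragmented MP4 files,
// PASSTHROUGH copies SCTE-35 markers from the input, and INCLUDE adds the
// accessibility caption markup to the manifest.
func exampleMpdSettings() *MpdSettings {
	return (&MpdSettings{}).
		SetCaptionContainerType("FRAGMENTED_MP4").
		SetScte35Source("PASSTHROUGH").
		SetAccessibilityCaptionHints("INCLUDE")
}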
15261
15262// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
15263// the value MPEG2.
15264type Mpeg2Settings struct {
15265	_ struct{} `type:"structure"`
15266
15267	// Specify the strength of any adaptive quantization filters that you enable.
15268	// The value that you choose here applies to the following settings: Spatial
15269	// adaptive quantization (spatialAdaptiveQuantization), and Temporal adaptive
15270	// quantization (temporalAdaptiveQuantization).
15271	AdaptiveQuantization *string `locationName:"adaptiveQuantization" type:"string" enum:"Mpeg2AdaptiveQuantization"`
15272
15273	// Specify the average bitrate in bits per second. Required for VBR and CBR.
15274	// For MS Smooth outputs, bitrates must be unique when rounded down to the nearest
15275	// multiple of 1000.
15276	Bitrate *int64 `locationName:"bitrate" min:"1000" type:"integer"`
15277
15278	// Use Level (Mpeg2CodecLevel) to set the MPEG-2 level for the video output.
15279	CodecLevel *string `locationName:"codecLevel" type:"string" enum:"Mpeg2CodecLevel"`
15280
15281	// Use Profile (Mpeg2CodecProfile) to set the MPEG-2 profile for the video output.
15282	CodecProfile *string `locationName:"codecProfile" type:"string" enum:"Mpeg2CodecProfile"`
15283
15284	// Choose Adaptive to improve subjective video quality for high-motion content.
15285	// This will cause the service to use fewer B-frames (which infer information
15286	// based on other frames) for high-motion portions of the video and more B-frames
15287	// for low-motion portions. The maximum number of B-frames is limited by the
15288	// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
15289	DynamicSubGop *string `locationName:"dynamicSubGop" type:"string" enum:"Mpeg2DynamicSubGop"`
15290
15291	// If you are using the console, use the Framerate setting to specify the frame
15292	// rate for this output. If you want to keep the same frame rate as the input
15293	// video, choose Follow source. If you want to do frame rate conversion, choose
15294	// a frame rate from the dropdown list or choose Custom. The framerates shown
15295	// in the dropdown list are decimal approximations of fractions. If you choose
15296	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
15297	// job specification as a JSON file without the console, use FramerateControl
15298	// to specify which value the service uses for the frame rate for this output.
15299	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
15300	// from the input. Choose SPECIFIED if you want the service to use the frame
15301	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
15302	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"Mpeg2FramerateControl"`
15303
15304	// Choose the method that you want MediaConvert to use when increasing or decreasing
15305	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
15306	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
15307	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
15308	// smooth picture, but might introduce undesirable video artifacts. For complex
15309	// frame rate conversions, especially if your source video has already been
15310	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
15311	// motion-compensated interpolation. FrameFormer chooses the best conversion
15312	// method frame by frame. Note that using FrameFormer increases the transcoding
15313	// time and incurs a significant add-on cost.
15314	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"Mpeg2FramerateConversionAlgorithm"`
15315
15316	// When you use the API for transcode jobs that use frame rate conversion, specify
15317	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
15318	// FramerateDenominator to specify the denominator of this fraction. In this
15319	// example, use 1001 for the value of FramerateDenominator. When you use the
15320	// console for transcode jobs that use frame rate conversion, provide the value
15321	// as a decimal number for Framerate. In this example, specify 23.976.
15322	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
15323
15324	// When you use the API for transcode jobs that use frame rate conversion, specify
15325	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
15326	// FramerateNumerator to specify the numerator of this fraction. In this example,
15327	// use 24000 for the value of FramerateNumerator. When you use the console for
15328	// transcode jobs that use frame rate conversion, provide the value as a decimal
15329	// number for Framerate. In this example, specify 23.976.
15330	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"24" type:"integer"`
15331
15332	// Frequency of closed GOPs. In streaming applications, it is recommended that
15333	// this be set to 1 so a decoder joining mid-stream will receive an IDR frame
15334	// as quickly as possible. Setting this value to 0 will break output segmenting.
15335	GopClosedCadence *int64 `locationName:"gopClosedCadence" type:"integer"`
15336
15337	// GOP Length (keyframe interval) in frames or seconds. Must be greater than
15338	// zero.
15339	GopSize *float64 `locationName:"gopSize" type:"double"`
15340
	// Indicates if the GOP Size in MPEG2 is specified in frames or seconds. If
	// seconds, the system will convert the GOP Size into a frame count at run time.
15343	GopSizeUnits *string `locationName:"gopSizeUnits" type:"string" enum:"Mpeg2GopSizeUnits"`
15344
15345	// Percentage of the buffer that should initially be filled (HRD buffer model).
15346	HrdBufferInitialFillPercentage *int64 `locationName:"hrdBufferInitialFillPercentage" type:"integer"`
15347
15348	// Size of buffer (HRD buffer model) in bits. For example, enter five megabits
15349	// as 5000000.
15350	HrdBufferSize *int64 `locationName:"hrdBufferSize" type:"integer"`
15351
15352	// Choose the scan line type for the output. Keep the default value, Progressive
15353	// (PROGRESSIVE) to create a progressive output, regardless of the scan type
15354	// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
15355	// to create an output that's interlaced with the same field polarity throughout.
15356	// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
15357	// to produce outputs with the same field polarity as the source. For jobs that
15358	// have multiple inputs, the output field polarity might change over the course
15359	// of the output. Follow behavior depends on the input scan type. If the source
15360	// is interlaced, the output will be interlaced with the same polarity as the
	// source. If the source is progressive, the output will be interlaced with
	// top field first or bottom field first polarity, depending on which of the
	// Follow options you choose.
15364	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"Mpeg2InterlaceMode"`
15365
15366	// Use Intra DC precision (Mpeg2IntraDcPrecision) to set quantization precision
15367	// for intra-block DC coefficients. If you choose the value auto, the service
15368	// will automatically select the precision based on the per-frame compression
15369	// ratio.
15370	IntraDcPrecision *string `locationName:"intraDcPrecision" type:"string" enum:"Mpeg2IntraDcPrecision"`
15371
15372	// Maximum bitrate in bits/second. For example, enter five megabits per second
15373	// as 5000000.
15374	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
15375
15376	// Enforces separation between repeated (cadence) I-frames and I-frames inserted
15377	// by Scene Change Detection. If a scene change I-frame is within I-interval
15378	// frames of a cadence I-frame, the GOP is shrunk and/or stretched to the scene
15379	// change I-frame. GOP stretch requires enabling lookahead as well as setting
15380	// I-interval. The normal cadence resumes for the next GOP. This setting is
15381	// only used when Scene Change Detect is enabled. Note: Maximum GOP stretch
15382	// = GOP size + Min-I-interval - 1
15383	MinIInterval *int64 `locationName:"minIInterval" type:"integer"`
15384
15385	// Number of B-frames between reference frames.
15386	NumberBFramesBetweenReferenceFrames *int64 `locationName:"numberBFramesBetweenReferenceFrames" type:"integer"`
15387
15388	// Optional. Specify how the service determines the pixel aspect ratio (PAR)
15389	// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
15390	// uses the PAR from your input video for your output. To specify a different
15391	// PAR in the console, choose any value other than Follow source. To specify
15392	// a different PAR by editing the JSON job specification, choose SPECIFIED.
15393	// When you choose SPECIFIED for this setting, you must also specify values
15394	// for the parNumerator and parDenominator settings.
15395	ParControl *string `locationName:"parControl" type:"string" enum:"Mpeg2ParControl"`
15396
15397	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
15398	// console, this corresponds to any value other than Follow source. When you
15399	// specify an output pixel aspect ratio (PAR) that is different from your input
15400	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
15401	// widescreen, you would specify the ratio 40:33. In this example, the value
15402	// for parDenominator is 33.
15403	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
15404
15405	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
15406	// console, this corresponds to any value other than Follow source. When you
15407	// specify an output pixel aspect ratio (PAR) that is different from your input
15408	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
15409	// widescreen, you would specify the ratio 40:33. In this example, the value
15410	// for parNumerator is 40.
15411	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
15412
15413	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
15414	// want to trade off encoding speed for output video quality. The default behavior
15415	// is faster, lower quality, single-pass encoding.
15416	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"Mpeg2QualityTuningLevel"`
15417
15418	// Use Rate control mode (Mpeg2RateControlMode) to specify whether the bitrate
15419	// is variable (vbr) or constant (cbr).
15420	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"Mpeg2RateControlMode"`
15421
15422	// Use this setting for interlaced outputs, when your output frame rate is half
15423	// of your input frame rate. In this situation, choose Optimized interlacing
15424	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
15425	// case, each progressive frame from the input corresponds to an interlaced
15426	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
15427	// for all other output frame rates. With basic interlacing, MediaConvert performs
15428	// any frame rate conversion first and then interlaces the frames. When you
15429	// choose Optimized interlacing and you set your output frame rate to a value
15430	// that isn't suitable for optimized interlacing, MediaConvert automatically
15431	// falls back to basic interlacing. Required settings: To use optimized interlacing,
15432	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
15433	// use optimized interlacing for hard telecine outputs. You must also set Interlace
15434	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
15435	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"Mpeg2ScanTypeConversionMode"`
15436
15437	// Enable this setting to insert I-frames at scene changes that the service
15438	// automatically detects. This improves video quality and is enabled by default.
15439	SceneChangeDetect *string `locationName:"sceneChangeDetect" type:"string" enum:"Mpeg2SceneChangeDetect"`
15440
15441	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
15442	// second (fps). Enable slow PAL to create a 25 fps output. When you enable
15443	// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
15444	// your audio to keep it synchronized with the video. Note that enabling this
15445	// setting will slightly reduce the duration of your video. Required settings:
15446	// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
15447	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
15448	// 1.
15449	SlowPal *string `locationName:"slowPal" type:"string" enum:"Mpeg2SlowPal"`
15450
15451	// Ignore this setting unless you need to comply with a specification that requires
15452	// a specific value. If you don't have a specification requirement, we recommend
15453	// that you adjust the softness of your output by using a lower value for the
15454	// setting Sharpness (sharpness) or by enabling a noise reducer filter (noiseReducerFilter).
15455	// The Softness (softness) setting specifies the quantization matrices that
15456	// the encoder uses. Keep the default value, 0, to use the AWS Elemental default
15457	// matrices. Choose a value from 17 to 128 to use planar interpolation. Increasing
15458	// values from 17 to 128 result in increasing reduction of high-frequency data.
15459	// The value 128 results in the softest video.
15460	Softness *int64 `locationName:"softness" type:"integer"`
15461
15462	// Keep the default value, Enabled (ENABLED), to adjust quantization within
15463	// each frame based on spatial variation of content complexity. When you enable
15464	// this feature, the encoder uses fewer bits on areas that can sustain more
15465	// distortion with no noticeable visual degradation and uses more bits on areas
15466	// where any small distortion will be noticeable. For example, complex textured
15467	// blocks are encoded with fewer bits and smooth textured blocks are encoded
15468	// with more bits. Enabling this feature will almost always improve your video
15469	// quality. Note, though, that this feature doesn't take into account where
15470	// the viewer's attention is likely to be. If viewers are likely to be focusing
15471	// their attention on a part of the screen with a lot of complex texture, you
15472	// might choose to disable this feature. Related setting: When you enable spatial
15473	// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
15474	// depending on your content. For homogeneous content, such as cartoons and
15475	// video games, set it to Low. For content with a wider variety of textures,
15476	// set it to High or Higher.
15477	SpatialAdaptiveQuantization *string `locationName:"spatialAdaptiveQuantization" type:"string" enum:"Mpeg2SpatialAdaptiveQuantization"`
15478
15479	// Specify whether this output's video uses the D10 syntax. Keep the default
15480	// value to not use the syntax. Related settings: When you choose D10 (D_10)
	// for your MXF profile (profile), you must also set this value to D10 (D_10).
15482	Syntax *string `locationName:"syntax" type:"string" enum:"Mpeg2Syntax"`
15483
15484	// When you do frame rate conversion from 23.976 frames per second (fps) to
15485	// 29.97 fps, and your output scan type is interlaced, you can optionally enable
15486	// hard or soft telecine to create a smoother picture. Hard telecine (HARD)
	// produces a 29.97i output. Soft telecine (SOFT) produces a 23.976 output
	// that signals to the video player device to do the conversion during playback.
	// When you keep the default value, None (NONE), MediaConvert
15490	// does a standard frame rate conversion to 29.97 without doing anything with
15491	// the field polarity to create a smoother picture.
15492	Telecine *string `locationName:"telecine" type:"string" enum:"Mpeg2Telecine"`
15493
15494	// Keep the default value, Enabled (ENABLED), to adjust quantization within
15495	// each frame based on temporal variation of content complexity. When you enable
15496	// this feature, the encoder uses fewer bits on areas of the frame that aren't
15497	// moving and uses more bits on complex objects with sharp edges that move a
15498	// lot. For example, this feature improves the readability of text tickers on
15499	// newscasts and scoreboards on sports matches. Enabling this feature will almost
15500	// always improve your video quality. Note, though, that this feature doesn't
15501	// take into account where the viewer's attention is likely to be. If viewers
15502	// are likely to be focusing their attention on a part of the screen that doesn't
15503	// have moving objects with sharp edges, such as sports athletes' faces, you
15504	// might choose to disable this feature. Related setting: When you enable temporal
15505	// quantization, adjust the strength of the filter with the setting Adaptive
15506	// quantization (adaptiveQuantization).
15507	TemporalAdaptiveQuantization *string `locationName:"temporalAdaptiveQuantization" type:"string" enum:"Mpeg2TemporalAdaptiveQuantization"`
15508}
15509
15510// String returns the string representation
15511func (s Mpeg2Settings) String() string {
15512	return awsutil.Prettify(s)
15513}
15514
15515// GoString returns the string representation
15516func (s Mpeg2Settings) GoString() string {
15517	return s.String()
15518}
15519
15520// Validate inspects the fields of the type to determine if they are valid.
15521func (s *Mpeg2Settings) Validate() error {
15522	invalidParams := request.ErrInvalidParams{Context: "Mpeg2Settings"}
15523	if s.Bitrate != nil && *s.Bitrate < 1000 {
15524		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 1000))
15525	}
15526	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
15527		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
15528	}
15529	if s.FramerateNumerator != nil && *s.FramerateNumerator < 24 {
15530		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 24))
15531	}
15532	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
15533		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
15534	}
15535	if s.ParDenominator != nil && *s.ParDenominator < 1 {
15536		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
15537	}
15538	if s.ParNumerator != nil && *s.ParNumerator < 1 {
15539		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
15540	}
15541
15542	if invalidParams.Len() > 0 {
15543		return invalidParams
15544	}
15545	return nil
15546}
15547
15548// SetAdaptiveQuantization sets the AdaptiveQuantization field's value.
15549func (s *Mpeg2Settings) SetAdaptiveQuantization(v string) *Mpeg2Settings {
15550	s.AdaptiveQuantization = &v
15551	return s
15552}
15553
15554// SetBitrate sets the Bitrate field's value.
15555func (s *Mpeg2Settings) SetBitrate(v int64) *Mpeg2Settings {
15556	s.Bitrate = &v
15557	return s
15558}
15559
15560// SetCodecLevel sets the CodecLevel field's value.
15561func (s *Mpeg2Settings) SetCodecLevel(v string) *Mpeg2Settings {
15562	s.CodecLevel = &v
15563	return s
15564}
15565
15566// SetCodecProfile sets the CodecProfile field's value.
15567func (s *Mpeg2Settings) SetCodecProfile(v string) *Mpeg2Settings {
15568	s.CodecProfile = &v
15569	return s
15570}
15571
15572// SetDynamicSubGop sets the DynamicSubGop field's value.
15573func (s *Mpeg2Settings) SetDynamicSubGop(v string) *Mpeg2Settings {
15574	s.DynamicSubGop = &v
15575	return s
15576}
15577
15578// SetFramerateControl sets the FramerateControl field's value.
15579func (s *Mpeg2Settings) SetFramerateControl(v string) *Mpeg2Settings {
15580	s.FramerateControl = &v
15581	return s
15582}
15583
15584// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
15585func (s *Mpeg2Settings) SetFramerateConversionAlgorithm(v string) *Mpeg2Settings {
15586	s.FramerateConversionAlgorithm = &v
15587	return s
15588}
15589
15590// SetFramerateDenominator sets the FramerateDenominator field's value.
15591func (s *Mpeg2Settings) SetFramerateDenominator(v int64) *Mpeg2Settings {
15592	s.FramerateDenominator = &v
15593	return s
15594}
15595
15596// SetFramerateNumerator sets the FramerateNumerator field's value.
15597func (s *Mpeg2Settings) SetFramerateNumerator(v int64) *Mpeg2Settings {
15598	s.FramerateNumerator = &v
15599	return s
15600}
15601
15602// SetGopClosedCadence sets the GopClosedCadence field's value.
15603func (s *Mpeg2Settings) SetGopClosedCadence(v int64) *Mpeg2Settings {
15604	s.GopClosedCadence = &v
15605	return s
15606}
15607
15608// SetGopSize sets the GopSize field's value.
15609func (s *Mpeg2Settings) SetGopSize(v float64) *Mpeg2Settings {
15610	s.GopSize = &v
15611	return s
15612}
15613
15614// SetGopSizeUnits sets the GopSizeUnits field's value.
15615func (s *Mpeg2Settings) SetGopSizeUnits(v string) *Mpeg2Settings {
15616	s.GopSizeUnits = &v
15617	return s
15618}
15619
15620// SetHrdBufferInitialFillPercentage sets the HrdBufferInitialFillPercentage field's value.
15621func (s *Mpeg2Settings) SetHrdBufferInitialFillPercentage(v int64) *Mpeg2Settings {
15622	s.HrdBufferInitialFillPercentage = &v
15623	return s
15624}
15625
15626// SetHrdBufferSize sets the HrdBufferSize field's value.
15627func (s *Mpeg2Settings) SetHrdBufferSize(v int64) *Mpeg2Settings {
15628	s.HrdBufferSize = &v
15629	return s
15630}
15631
15632// SetInterlaceMode sets the InterlaceMode field's value.
15633func (s *Mpeg2Settings) SetInterlaceMode(v string) *Mpeg2Settings {
15634	s.InterlaceMode = &v
15635	return s
15636}
15637
15638// SetIntraDcPrecision sets the IntraDcPrecision field's value.
15639func (s *Mpeg2Settings) SetIntraDcPrecision(v string) *Mpeg2Settings {
15640	s.IntraDcPrecision = &v
15641	return s
15642}
15643
15644// SetMaxBitrate sets the MaxBitrate field's value.
15645func (s *Mpeg2Settings) SetMaxBitrate(v int64) *Mpeg2Settings {
15646	s.MaxBitrate = &v
15647	return s
15648}
15649
15650// SetMinIInterval sets the MinIInterval field's value.
15651func (s *Mpeg2Settings) SetMinIInterval(v int64) *Mpeg2Settings {
15652	s.MinIInterval = &v
15653	return s
15654}
15655
15656// SetNumberBFramesBetweenReferenceFrames sets the NumberBFramesBetweenReferenceFrames field's value.
15657func (s *Mpeg2Settings) SetNumberBFramesBetweenReferenceFrames(v int64) *Mpeg2Settings {
15658	s.NumberBFramesBetweenReferenceFrames = &v
15659	return s
15660}
15661
15662// SetParControl sets the ParControl field's value.
15663func (s *Mpeg2Settings) SetParControl(v string) *Mpeg2Settings {
15664	s.ParControl = &v
15665	return s
15666}
15667
15668// SetParDenominator sets the ParDenominator field's value.
15669func (s *Mpeg2Settings) SetParDenominator(v int64) *Mpeg2Settings {
15670	s.ParDenominator = &v
15671	return s
15672}
15673
15674// SetParNumerator sets the ParNumerator field's value.
15675func (s *Mpeg2Settings) SetParNumerator(v int64) *Mpeg2Settings {
15676	s.ParNumerator = &v
15677	return s
15678}
15679
15680// SetQualityTuningLevel sets the QualityTuningLevel field's value.
15681func (s *Mpeg2Settings) SetQualityTuningLevel(v string) *Mpeg2Settings {
15682	s.QualityTuningLevel = &v
15683	return s
15684}
15685
15686// SetRateControlMode sets the RateControlMode field's value.
15687func (s *Mpeg2Settings) SetRateControlMode(v string) *Mpeg2Settings {
15688	s.RateControlMode = &v
15689	return s
15690}
15691
15692// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
15693func (s *Mpeg2Settings) SetScanTypeConversionMode(v string) *Mpeg2Settings {
15694	s.ScanTypeConversionMode = &v
15695	return s
15696}
15697
15698// SetSceneChangeDetect sets the SceneChangeDetect field's value.
15699func (s *Mpeg2Settings) SetSceneChangeDetect(v string) *Mpeg2Settings {
15700	s.SceneChangeDetect = &v
15701	return s
15702}
15703
15704// SetSlowPal sets the SlowPal field's value.
15705func (s *Mpeg2Settings) SetSlowPal(v string) *Mpeg2Settings {
15706	s.SlowPal = &v
15707	return s
15708}
15709
15710// SetSoftness sets the Softness field's value.
15711func (s *Mpeg2Settings) SetSoftness(v int64) *Mpeg2Settings {
15712	s.Softness = &v
15713	return s
15714}
15715
15716// SetSpatialAdaptiveQuantization sets the SpatialAdaptiveQuantization field's value.
15717func (s *Mpeg2Settings) SetSpatialAdaptiveQuantization(v string) *Mpeg2Settings {
15718	s.SpatialAdaptiveQuantization = &v
15719	return s
15720}
15721
15722// SetSyntax sets the Syntax field's value.
15723func (s *Mpeg2Settings) SetSyntax(v string) *Mpeg2Settings {
15724	s.Syntax = &v
15725	return s
15726}
15727
15728// SetTelecine sets the Telecine field's value.
15729func (s *Mpeg2Settings) SetTelecine(v string) *Mpeg2Settings {
15730	s.Telecine = &v
15731	return s
15732}
15733
15734// SetTemporalAdaptiveQuantization sets the TemporalAdaptiveQuantization field's value.
15735func (s *Mpeg2Settings) SetTemporalAdaptiveQuantization(v string) *Mpeg2Settings {
15736	s.TemporalAdaptiveQuantization = &v
15737	return s
15738}
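
// exampleMpeg2Settings is a hypothetical sketch (not part of the generated API)
// of an MPEG-2 video configuration. Following the field documentation above,
// the frame rate is given as the fraction 24000/1001 (23.976 fps) with the
// control set to SPECIFIED, the bitrate is five megabits expressed as 5000000,
// and a widescreen D1/DV NTSC pixel aspect ratio is expressed as 40:33. The CBR
// enum spelling is assumed from the rate control documentation.
func exampleMpeg2Settings() *Mpeg2Settings {
	s := (&Mpeg2Settings{}).
		SetFramerateControl("SPECIFIED").
		SetFramerateNumerator(24000).
		SetFramerateDenominator(1001).
		SetRateControlMode("CBR").
		SetBitrate(5000000).
		SetParControl("SPECIFIED").
		SetParNumerator(40).
		SetParDenominator(33)
	if err := s.Validate(); err != nil {
		fmt.Println(err)
	}
	return s
}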
15739
15740// Specify the details for each additional Microsoft Smooth Streaming manifest
15741// that you want the service to generate for this output group. Each manifest
15742// can reference a different subset of outputs in the group.
15743type MsSmoothAdditionalManifest struct {
15744	_ struct{} `type:"structure"`
15745
15746	// Specify a name modifier that the service adds to the name of this manifest
15747	// to make it different from the file names of the other main manifests in the
15748	// output group. For example, say that the default main manifest for your Microsoft
15749	// Smooth group is film-name.ismv. If you enter "-no-premium" for this setting,
15750	// then the file name the service generates for this top-level manifest is film-name-no-premium.ismv.
15751	ManifestNameModifier *string `locationName:"manifestNameModifier" min:"1" type:"string"`
15752
15753	// Specify the outputs that you want this additional top-level manifest to reference.
15754	SelectedOutputs []*string `locationName:"selectedOutputs" type:"list"`
15755}
15756
15757// String returns the string representation
15758func (s MsSmoothAdditionalManifest) String() string {
15759	return awsutil.Prettify(s)
15760}
15761
15762// GoString returns the string representation
15763func (s MsSmoothAdditionalManifest) GoString() string {
15764	return s.String()
15765}
15766
15767// Validate inspects the fields of the type to determine if they are valid.
15768func (s *MsSmoothAdditionalManifest) Validate() error {
15769	invalidParams := request.ErrInvalidParams{Context: "MsSmoothAdditionalManifest"}
15770	if s.ManifestNameModifier != nil && len(*s.ManifestNameModifier) < 1 {
15771		invalidParams.Add(request.NewErrParamMinLen("ManifestNameModifier", 1))
15772	}
15773
15774	if invalidParams.Len() > 0 {
15775		return invalidParams
15776	}
15777	return nil
15778}
15779
15780// SetManifestNameModifier sets the ManifestNameModifier field's value.
15781func (s *MsSmoothAdditionalManifest) SetManifestNameModifier(v string) *MsSmoothAdditionalManifest {
15782	s.ManifestNameModifier = &v
15783	return s
15784}
15785
15786// SetSelectedOutputs sets the SelectedOutputs field's value.
15787func (s *MsSmoothAdditionalManifest) SetSelectedOutputs(v []*string) *MsSmoothAdditionalManifest {
15788	s.SelectedOutputs = v
15789	return s
15790}
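
// exampleMsSmoothAdditionalManifest is a hypothetical sketch (not part of the
// generated API) of an additional Microsoft Smooth manifest. The "-no-premium"
// modifier comes from the field documentation above; the output names in
// SelectedOutputs are placeholders and must match outputs defined elsewhere in
// the job.
func exampleMsSmoothAdditionalManifest() *MsSmoothAdditionalManifest {
	m := (&MsSmoothAdditionalManifest{}).
		SetManifestNameModifier("-no-premium").
		SetSelectedOutputs([]*string{aws.String("sd-output"), aws.String("hd-output")})
	if err := m.Validate(); err != nil {
		fmt.Println(err)
	}
	return m
}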
15791
15792// If you are using DRM, set DRM System (MsSmoothEncryptionSettings) to specify
15793// the value SpekeKeyProvider.
15794type MsSmoothEncryptionSettings struct {
15795	_ struct{} `type:"structure"`
15796
15797	// If your output group type is HLS, DASH, or Microsoft Smooth, use these settings
15798	// when doing DRM encryption with a SPEKE-compliant key provider. If your output
15799	// group type is CMAF, use the SpekeKeyProviderCmaf settings instead.
15800	SpekeKeyProvider *SpekeKeyProvider `locationName:"spekeKeyProvider" type:"structure"`
15801}
15802
15803// String returns the string representation
15804func (s MsSmoothEncryptionSettings) String() string {
15805	return awsutil.Prettify(s)
15806}
15807
15808// GoString returns the string representation
15809func (s MsSmoothEncryptionSettings) GoString() string {
15810	return s.String()
15811}
15812
15813// SetSpekeKeyProvider sets the SpekeKeyProvider field's value.
15814func (s *MsSmoothEncryptionSettings) SetSpekeKeyProvider(v *SpekeKeyProvider) *MsSmoothEncryptionSettings {
15815	s.SpekeKeyProvider = v
15816	return s
15817}
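
// Example (illustrative, not part of the generated API documentation): a minimal
// sketch of DRM encryption for a Microsoft Smooth output group with a SPEKE-compliant
// key provider. The key provider URL, resource ID, and DRM system ID below are
// placeholder values; see the SpekeKeyProvider type for the available fields.
//
//    encryption := (&mediaconvert.MsSmoothEncryptionSettings{}).
//        SetSpekeKeyProvider((&mediaconvert.SpekeKeyProvider{}).
//            SetUrl("https://speke-server.example.com/speke/v1.0/copyProtection").
//            SetResourceId("example-asset-id").
//            SetSystemIds([]*string{aws.String("9a04f079-9840-4286-ab92-e65be0885f95")}))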
15818
15819// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
15820// MS_SMOOTH_GROUP_SETTINGS.
15821type MsSmoothGroupSettings struct {
15822	_ struct{} `type:"structure"`
15823
15824	// By default, the service creates one .ism Microsoft Smooth Streaming manifest
15825	// for each Microsoft Smooth Streaming output group in your job. This default
15826	// manifest references every output in the output group. To create additional
15827	// manifests that reference a subset of the outputs in the output group, specify
15828	// a list of them here.
15829	AdditionalManifests []*MsSmoothAdditionalManifest `locationName:"additionalManifests" type:"list"`
15830
15831	// COMBINE_DUPLICATE_STREAMS combines identical audio encoding settings across
15832	// a Microsoft Smooth output group into a single audio stream.
15833	AudioDeduplication *string `locationName:"audioDeduplication" type:"string" enum:"MsSmoothAudioDeduplication"`
15834
15835	// Use Destination (Destination) to specify the S3 output location and the output
15836	// filename base. Destination accepts format identifiers. If you do not specify
15837	// the base filename in the URI, the service will use the filename of the input
15838	// file. If your job has multiple inputs, the service uses the filename of the
15839	// first input file.
15840	Destination *string `locationName:"destination" type:"string"`
15841
15842	// Settings associated with the destination. Will vary based on the type of
15843	// destination.

15844	DestinationSettings *DestinationSettings `locationName:"destinationSettings" type:"structure"`
15845
15846	// If you are using DRM, set DRM System (MsSmoothEncryptionSettings) to specify
15847	// the value SpekeKeyProvider.
15848	Encryption *MsSmoothEncryptionSettings `locationName:"encryption" type:"structure"`
15849
15850	// Use Fragment length (FragmentLength) to specify the mp4 fragment sizes in
15851	// seconds. Fragment length must be compatible with GOP size and frame rate.
15852	FragmentLength *int64 `locationName:"fragmentLength" min:"1" type:"integer"`
15853
15854	// Use Manifest encoding (MsSmoothManifestEncoding) to specify the encoding
15855	// format for the server and client manifest. Valid options are utf8 and utf16.
15856	ManifestEncoding *string `locationName:"manifestEncoding" type:"string" enum:"MsSmoothManifestEncoding"`
15857}
15858
15859// String returns the string representation
15860func (s MsSmoothGroupSettings) String() string {
15861	return awsutil.Prettify(s)
15862}
15863
15864// GoString returns the string representation
15865func (s MsSmoothGroupSettings) GoString() string {
15866	return s.String()
15867}
15868
15869// Validate inspects the fields of the type to determine if they are valid.
15870func (s *MsSmoothGroupSettings) Validate() error {
15871	invalidParams := request.ErrInvalidParams{Context: "MsSmoothGroupSettings"}
15872	if s.FragmentLength != nil && *s.FragmentLength < 1 {
15873		invalidParams.Add(request.NewErrParamMinValue("FragmentLength", 1))
15874	}
15875	if s.AdditionalManifests != nil {
15876		for i, v := range s.AdditionalManifests {
15877			if v == nil {
15878				continue
15879			}
15880			if err := v.Validate(); err != nil {
15881				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AdditionalManifests", i), err.(request.ErrInvalidParams))
15882			}
15883		}
15884	}
15885
15886	if invalidParams.Len() > 0 {
15887		return invalidParams
15888	}
15889	return nil
15890}
15891
15892// SetAdditionalManifests sets the AdditionalManifests field's value.
15893func (s *MsSmoothGroupSettings) SetAdditionalManifests(v []*MsSmoothAdditionalManifest) *MsSmoothGroupSettings {
15894	s.AdditionalManifests = v
15895	return s
15896}
15897
15898// SetAudioDeduplication sets the AudioDeduplication field's value.
15899func (s *MsSmoothGroupSettings) SetAudioDeduplication(v string) *MsSmoothGroupSettings {
15900	s.AudioDeduplication = &v
15901	return s
15902}
15903
15904// SetDestination sets the Destination field's value.
15905func (s *MsSmoothGroupSettings) SetDestination(v string) *MsSmoothGroupSettings {
15906	s.Destination = &v
15907	return s
15908}
15909
15910// SetDestinationSettings sets the DestinationSettings field's value.
15911func (s *MsSmoothGroupSettings) SetDestinationSettings(v *DestinationSettings) *MsSmoothGroupSettings {
15912	s.DestinationSettings = v
15913	return s
15914}
15915
15916// SetEncryption sets the Encryption field's value.
15917func (s *MsSmoothGroupSettings) SetEncryption(v *MsSmoothEncryptionSettings) *MsSmoothGroupSettings {
15918	s.Encryption = v
15919	return s
15920}
15921
15922// SetFragmentLength sets the FragmentLength field's value.
15923func (s *MsSmoothGroupSettings) SetFragmentLength(v int64) *MsSmoothGroupSettings {
15924	s.FragmentLength = &v
15925	return s
15926}
15927
15928// SetManifestEncoding sets the ManifestEncoding field's value.
15929func (s *MsSmoothGroupSettings) SetManifestEncoding(v string) *MsSmoothGroupSettings {
15930	s.ManifestEncoding = &v
15931	return s
15932}
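
// Example (illustrative, not part of the generated API documentation): a minimal
// MsSmoothGroupSettings sketch. The destination URI and fragment length below are
// placeholder values; the fragment length must be compatible with your GOP size
// and frame rate.
//
//    msSmooth := (&mediaconvert.MsSmoothGroupSettings{}).
//        SetDestination("s3://DOC-EXAMPLE-BUCKET/smooth/film-name").
//        SetFragmentLength(2).
//        SetManifestEncoding("UTF8").
//        SetAudioDeduplication("COMBINE_DUPLICATE_STREAMS")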
15933
15934// MXF settings
15935type MxfSettings struct {
15936	_ struct{} `type:"structure"`
15937
15938	// Optional. When you have AFD signaling set up in your output video stream,
15939	// use this setting to choose whether to also include it in the MXF wrapper.
15940	// Choose Don't copy (NO_COPY) to exclude AFD signaling from the MXF wrapper.
15941	// Choose Copy from video stream (COPY_FROM_VIDEO) to copy the AFD values from
15942	// the video stream for this output to the MXF wrapper. Regardless of which
15943	// option you choose, the AFD values remain in the video stream. Related settings:
15944	// To set up your output to include or exclude AFD values, see AfdSignaling,
15945	// under VideoDescription. On the console, find AFD signaling under the output's
15946	// video encoding settings.
15947	AfdSignaling *string `locationName:"afdSignaling" type:"string" enum:"MxfAfdSignaling"`
15948
15949	// Specify the MXF profile, also called shim, for this output. When you choose
15950	// Auto, MediaConvert chooses a profile based on the video codec and resolution.
15951	// For a list of codecs supported with each MXF profile, see https://docs.aws.amazon.com/mediaconvert/latest/ug/codecs-supported-with-each-mxf-profile.html.
15952	// For more information about the automatic selection behavior, see https://docs.aws.amazon.com/mediaconvert/latest/ug/default-automatic-selection-of-mxf-profiles.html.
15953	Profile *string `locationName:"profile" type:"string" enum:"MxfProfile"`
15954}
15955
15956// String returns the string representation
15957func (s MxfSettings) String() string {
15958	return awsutil.Prettify(s)
15959}
15960
15961// GoString returns the string representation
15962func (s MxfSettings) GoString() string {
15963	return s.String()
15964}
15965
15966// SetAfdSignaling sets the AfdSignaling field's value.
15967func (s *MxfSettings) SetAfdSignaling(v string) *MxfSettings {
15968	s.AfdSignaling = &v
15969	return s
15970}
15971
15972// SetProfile sets the Profile field's value.
15973func (s *MxfSettings) SetProfile(v string) *MxfSettings {
15974	s.Profile = &v
15975	return s
15976}
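
// Example (illustrative, not part of the generated API documentation): MXF wrapper
// settings that copy AFD values from the video stream and pin the MXF profile
// instead of letting MediaConvert choose one automatically. OP1A is one of the
// MxfProfile enum values; confirm it suits your codec and resolution.
//
//    mxf := (&mediaconvert.MxfSettings{}).
//        SetAfdSignaling("COPY_FROM_VIDEO").
//        SetProfile("OP1A")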
15977
15978// For forensic video watermarking, MediaConvert supports Nagra NexGuard File
15979// Marker watermarking. MediaConvert supports both PreRelease Content (NGPR/G2)
15980// and OTT Streaming workflows.
15981type NexGuardFileMarkerSettings struct {
15982	_ struct{} `type:"structure"`
15983
15984	// Use the base64 license string that Nagra provides you. Enter it directly
15985	// in your JSON job specification or in the console. Required when you include
15986	// Nagra NexGuard File Marker watermarking (NexGuardWatermarkingSettings) in
15987	// your job.
15988	License *string `locationName:"license" min:"1" type:"string"`
15989
15990	// Specify the payload ID that you want associated with this output. Valid values
15991	// vary depending on your Nagra NexGuard forensic watermarking workflow. Required
15992	// when you include Nagra NexGuard File Marker watermarking (NexGuardWatermarkingSettings)
15993	// in your job. For PreRelease Content (NGPR/G2), specify an integer from 1
15994	// through 4,194,303. You must generate a unique ID for each asset you watermark,
15995	// and keep a record of which ID you have assigned to each asset. Neither Nagra
15996	// nor MediaConvert keep track of the relationship between output files and
15997	// your IDs. For OTT Streaming, create two adaptive bitrate (ABR) stacks for
15998	// each asset. Do this by setting up two output groups. For one output group,
15999	// set the value of Payload ID (payload) to 0 in every output. For the other
16000	// output group, set Payload ID (payload) to 1 in every output.
16001	Payload *int64 `locationName:"payload" type:"integer"`
16002
16003	// Enter one of the watermarking preset strings that Nagra provides you. Required
16004	// when you include Nagra NexGuard File Marker watermarking (NexGuardWatermarkingSettings)
16005	// in your job.
16006	Preset *string `locationName:"preset" min:"1" type:"string"`
16007
16008	// Optional. Ignore this setting unless Nagra support directs you to specify
16009	// a value. When you don't specify a value here, the Nagra NexGuard library
16010	// uses its default value.
16011	Strength *string `locationName:"strength" type:"string" enum:"WatermarkingStrength"`
16012}
16013
16014// String returns the string representation
16015func (s NexGuardFileMarkerSettings) String() string {
16016	return awsutil.Prettify(s)
16017}
16018
16019// GoString returns the string representation
16020func (s NexGuardFileMarkerSettings) GoString() string {
16021	return s.String()
16022}
16023
16024// Validate inspects the fields of the type to determine if they are valid.
16025func (s *NexGuardFileMarkerSettings) Validate() error {
16026	invalidParams := request.ErrInvalidParams{Context: "NexGuardFileMarkerSettings"}
16027	if s.License != nil && len(*s.License) < 1 {
16028		invalidParams.Add(request.NewErrParamMinLen("License", 1))
16029	}
16030	if s.Preset != nil && len(*s.Preset) < 1 {
16031		invalidParams.Add(request.NewErrParamMinLen("Preset", 1))
16032	}
16033
16034	if invalidParams.Len() > 0 {
16035		return invalidParams
16036	}
16037	return nil
16038}
16039
16040// SetLicense sets the License field's value.
16041func (s *NexGuardFileMarkerSettings) SetLicense(v string) *NexGuardFileMarkerSettings {
16042	s.License = &v
16043	return s
16044}
16045
16046// SetPayload sets the Payload field's value.
16047func (s *NexGuardFileMarkerSettings) SetPayload(v int64) *NexGuardFileMarkerSettings {
16048	s.Payload = &v
16049	return s
16050}
16051
16052// SetPreset sets the Preset field's value.
16053func (s *NexGuardFileMarkerSettings) SetPreset(v string) *NexGuardFileMarkerSettings {
16054	s.Preset = &v
16055	return s
16056}
16057
16058// SetStrength sets the Strength field's value.
16059func (s *NexGuardFileMarkerSettings) SetStrength(v string) *NexGuardFileMarkerSettings {
16060	s.Strength = &v
16061	return s
16062}
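
// Example (illustrative, not part of the generated API documentation): Nagra NexGuard
// File Marker settings for an OTT Streaming workflow. The license and preset strings
// below are placeholders for the values that Nagra provides; per the Payload
// documentation above, the second ABR stack would use SetPayload(1).
//
//    nexGuard := (&mediaconvert.NexGuardFileMarkerSettings{}).
//        SetLicense("base64-license-from-nagra").
//        SetPreset("nexguard-preset-from-nagra").
//        SetPayload(0)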
16063
16064// Settings for your Nielsen configuration. If you don't do Nielsen measurement
16065// and analytics, ignore these settings. When you enable Nielsen configuration
16066// (nielsenConfiguration), MediaConvert enables PCM to ID3 tagging for all outputs
16067// in the job. To enable Nielsen configuration programmatically, include an
16068// instance of nielsenConfiguration in your JSON job specification. Even if
16069// you don't include any children of nielsenConfiguration, you still enable
16070// the setting.
16071type NielsenConfiguration struct {
16072	_ struct{} `type:"structure"`
16073
16074	// Nielsen has discontinued the use of breakout code functionality. If you must
16075	// include this property, set the value to zero.
16076	BreakoutCode *int64 `locationName:"breakoutCode" type:"integer"`
16077
16078	// Use Distributor ID (DistributorID) to specify the distributor ID that is
16079	// assigned to your organization by Nielsen.
16080	DistributorId *string `locationName:"distributorId" type:"string"`
16081}
16082
16083// String returns the string representation
16084func (s NielsenConfiguration) String() string {
16085	return awsutil.Prettify(s)
16086}
16087
16088// GoString returns the string representation
16089func (s NielsenConfiguration) GoString() string {
16090	return s.String()
16091}
16092
16093// SetBreakoutCode sets the BreakoutCode field's value.
16094func (s *NielsenConfiguration) SetBreakoutCode(v int64) *NielsenConfiguration {
16095	s.BreakoutCode = &v
16096	return s
16097}
16098
16099// SetDistributorId sets the DistributorId field's value.
16100func (s *NielsenConfiguration) SetDistributorId(v string) *NielsenConfiguration {
16101	s.DistributorId = &v
16102	return s
16103}
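
// Example (illustrative, not part of the generated API documentation): enabling
// Nielsen configuration for a job. The distributor ID below is a placeholder for
// the ID that Nielsen assigns to your organization; BreakoutCode is set to zero
// because Nielsen has discontinued breakout codes.
//
//    nielsen := (&mediaconvert.NielsenConfiguration{}).
//        SetDistributorId("example-distributor-id").
//        SetBreakoutCode(0)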
16104
16105// Ignore these settings unless you are using Nielsen non-linear watermarking.
16106// Specify the values that MediaConvert uses to generate and place Nielsen watermarks
16107// in your output audio. In addition to specifying these values, you also need
16108// to set up your cloud TIC server. These settings apply to every output in
16109	// your job. The MediaConvert implementation is currently compatible with the
16110	// following Nielsen versions: Nielsen Watermark SDK Version 5.2.1, Nielsen NLM
16111	// Watermark Engine Version 1.2.7, and Nielsen Watermark Authenticator [SID_TIC] Version [5.0.0].
16112type NielsenNonLinearWatermarkSettings struct {
16113	_ struct{} `type:"structure"`
16114
16115	// Choose the type of Nielsen watermarks that you want in your outputs. When
16116	// you choose NAES 2 and NW (NAES2_AND_NW), you must provide a value for the
16117	// setting SID (sourceId). When you choose CBET (CBET), you must provide a value
16118	// for the setting CSID (cbetSourceId). When you choose NAES 2, NW, and CBET
16119	// (NAES2_AND_NW_AND_CBET), you must provide values for both of these settings.
16120	ActiveWatermarkProcess *string `locationName:"activeWatermarkProcess" type:"string" enum:"NielsenActiveWatermarkProcessType"`
16121
16122	// Optional. Use this setting when you want the service to include an ADI file
16123	// in the Nielsen metadata .zip file. To provide an ADI file, store it in Amazon
16124	// S3 and provide a URL to it here. The URL should be in the following format:
16125	// S3://bucket/path/ADI-file. For more information about the metadata .zip file,
16126	// see the setting Metadata destination (metadataDestination).
16127	AdiFilename *string `locationName:"adiFilename" type:"string"`
16128
16129	// Use the asset ID that you provide to Nielsen to uniquely identify this asset.
16130	// Required for all Nielsen non-linear watermarking.
16131	AssetId *string `locationName:"assetId" min:"1" type:"string"`
16132
16133	// Use the asset name that you provide to Nielsen for this asset. Required for
16134	// all Nielsen non-linear watermarking.
16135	AssetName *string `locationName:"assetName" min:"1" type:"string"`
16136
16137	// Use the CSID that Nielsen provides to you. This CBET source ID should be
16138	// unique to your Nielsen account but common to all of your output assets that
16139	// have CBET watermarking. Required when you choose a value for the setting
16140	// Watermark types (ActiveWatermarkProcess) that includes CBET.
16141	CbetSourceId *string `locationName:"cbetSourceId" type:"string"`
16142
16143	// Optional. If this asset uses an episode ID with Nielsen, provide it here.
16144	EpisodeId *string `locationName:"episodeId" min:"1" type:"string"`
16145
16146	// Specify the Amazon S3 location where you want MediaConvert to save your Nielsen
16147	// non-linear metadata .zip file. This Amazon S3 bucket must be in the same
16148	// Region as the one where you do your MediaConvert transcoding. If you want
16149	// to include an ADI file in this .zip file, use the setting ADI file (adiFilename)
16150	// to specify it. MediaConvert delivers the Nielsen metadata .zip files only
16151	// to your metadata destination Amazon S3 bucket. It doesn't deliver the .zip
16152	// files to Nielsen. You are responsible for delivering the metadata .zip files
16153	// to Nielsen.
16154	MetadataDestination *string `locationName:"metadataDestination" type:"string"`
16155
16156	// Use the SID that Nielsen provides to you. This source ID should be unique
16157	// to your Nielsen account but common to all of your output assets. Required
16158	// for all Nielsen non-linear watermarking.
16161	SourceId *int64 `locationName:"sourceId" type:"integer"`
16162
16163	// Required. Specify whether your source content already contains Nielsen non-linear
16164	// watermarks. When you set this value to Watermarked (WATERMARKED), the service
16165	// fails the job. Nielsen requires that you add non-linear watermarking to only
16166	// clean content that doesn't already have non-linear Nielsen watermarks.
16167	SourceWatermarkStatus *string `locationName:"sourceWatermarkStatus" type:"string" enum:"NielsenSourceWatermarkStatusType"`
16168
16169	// Specify the endpoint for the TIC server that you have deployed and configured
16170	// in the AWS Cloud. Required for all Nielsen non-linear watermarking. MediaConvert
16171	// can't connect directly to a TIC server. Instead, you must use API Gateway
16172	// to provide a RESTful interface between MediaConvert and a TIC server that
16173	// you deploy in your AWS account. For more information on deploying a TIC server
16174	// in your AWS account and the required API Gateway, contact Nielsen support.
16175	TicServerUrl *string `locationName:"ticServerUrl" type:"string"`
16176
16177	// To create assets that have the same TIC values in each audio track, keep
16178	// the default value Share TICs (SAME_TICS_PER_TRACK). To create assets that
16179	// have unique TIC values for each audio track, choose Use unique TICs (RESERVE_UNIQUE_TICS_PER_TRACK).
16180	UniqueTicPerAudioTrack *string `locationName:"uniqueTicPerAudioTrack" type:"string" enum:"NielsenUniqueTicPerAudioTrackType"`
16181}
16182
16183// String returns the string representation
16184func (s NielsenNonLinearWatermarkSettings) String() string {
16185	return awsutil.Prettify(s)
16186}
16187
16188// GoString returns the string representation
16189func (s NielsenNonLinearWatermarkSettings) GoString() string {
16190	return s.String()
16191}
16192
16193// Validate inspects the fields of the type to determine if they are valid.
16194func (s *NielsenNonLinearWatermarkSettings) Validate() error {
16195	invalidParams := request.ErrInvalidParams{Context: "NielsenNonLinearWatermarkSettings"}
16196	if s.AssetId != nil && len(*s.AssetId) < 1 {
16197		invalidParams.Add(request.NewErrParamMinLen("AssetId", 1))
16198	}
16199	if s.AssetName != nil && len(*s.AssetName) < 1 {
16200		invalidParams.Add(request.NewErrParamMinLen("AssetName", 1))
16201	}
16202	if s.EpisodeId != nil && len(*s.EpisodeId) < 1 {
16203		invalidParams.Add(request.NewErrParamMinLen("EpisodeId", 1))
16204	}
16205
16206	if invalidParams.Len() > 0 {
16207		return invalidParams
16208	}
16209	return nil
16210}
16211
16212// SetActiveWatermarkProcess sets the ActiveWatermarkProcess field's value.
16213func (s *NielsenNonLinearWatermarkSettings) SetActiveWatermarkProcess(v string) *NielsenNonLinearWatermarkSettings {
16214	s.ActiveWatermarkProcess = &v
16215	return s
16216}
16217
16218// SetAdiFilename sets the AdiFilename field's value.
16219func (s *NielsenNonLinearWatermarkSettings) SetAdiFilename(v string) *NielsenNonLinearWatermarkSettings {
16220	s.AdiFilename = &v
16221	return s
16222}
16223
16224// SetAssetId sets the AssetId field's value.
16225func (s *NielsenNonLinearWatermarkSettings) SetAssetId(v string) *NielsenNonLinearWatermarkSettings {
16226	s.AssetId = &v
16227	return s
16228}
16229
16230// SetAssetName sets the AssetName field's value.
16231func (s *NielsenNonLinearWatermarkSettings) SetAssetName(v string) *NielsenNonLinearWatermarkSettings {
16232	s.AssetName = &v
16233	return s
16234}
16235
16236// SetCbetSourceId sets the CbetSourceId field's value.
16237func (s *NielsenNonLinearWatermarkSettings) SetCbetSourceId(v string) *NielsenNonLinearWatermarkSettings {
16238	s.CbetSourceId = &v
16239	return s
16240}
16241
16242// SetEpisodeId sets the EpisodeId field's value.
16243func (s *NielsenNonLinearWatermarkSettings) SetEpisodeId(v string) *NielsenNonLinearWatermarkSettings {
16244	s.EpisodeId = &v
16245	return s
16246}
16247
16248// SetMetadataDestination sets the MetadataDestination field's value.
16249func (s *NielsenNonLinearWatermarkSettings) SetMetadataDestination(v string) *NielsenNonLinearWatermarkSettings {
16250	s.MetadataDestination = &v
16251	return s
16252}
16253
16254// SetSourceId sets the SourceId field's value.
16255func (s *NielsenNonLinearWatermarkSettings) SetSourceId(v int64) *NielsenNonLinearWatermarkSettings {
16256	s.SourceId = &v
16257	return s
16258}
16259
16260// SetSourceWatermarkStatus sets the SourceWatermarkStatus field's value.
16261func (s *NielsenNonLinearWatermarkSettings) SetSourceWatermarkStatus(v string) *NielsenNonLinearWatermarkSettings {
16262	s.SourceWatermarkStatus = &v
16263	return s
16264}
16265
16266// SetTicServerUrl sets the TicServerUrl field's value.
16267func (s *NielsenNonLinearWatermarkSettings) SetTicServerUrl(v string) *NielsenNonLinearWatermarkSettings {
16268	s.TicServerUrl = &v
16269	return s
16270}
16271
16272// SetUniqueTicPerAudioTrack sets the UniqueTicPerAudioTrack field's value.
16273func (s *NielsenNonLinearWatermarkSettings) SetUniqueTicPerAudioTrack(v string) *NielsenNonLinearWatermarkSettings {
16274	s.UniqueTicPerAudioTrack = &v
16275	return s
16276}
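
// Example (illustrative, not part of the generated API documentation): Nielsen
// non-linear watermarking with the NAES 2 and NW process. The asset details, SID,
// and URLs below are placeholder values; the TIC server URL must point at the API
// Gateway endpoint in front of the TIC server that you deploy in your own account.
//
//    nlm := (&mediaconvert.NielsenNonLinearWatermarkSettings{}).
//        SetActiveWatermarkProcess("NAES2_AND_NW").
//        SetAssetId("example-asset-id").
//        SetAssetName("Example Asset").
//        SetSourceId(1234).
//        SetSourceWatermarkStatus("CLEAN").
//        SetTicServerUrl("https://example.execute-api.us-east-1.amazonaws.com/prod").
//        SetMetadataDestination("s3://DOC-EXAMPLE-BUCKET/nielsen-metadata/")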
16277
16278// Enable the Noise reducer (NoiseReducer) feature to remove noise from your
16279// video output if necessary. Enable or disable this feature for each output
16280// individually. This setting is disabled by default. When you enable Noise
16281// reducer (NoiseReducer), you must also select a value for Noise reducer filter
16282// (NoiseReducerFilter).
16283type NoiseReducer struct {
16284	_ struct{} `type:"structure"`
16285
16286	// Use Noise reducer filter (NoiseReducerFilter) to select one of the following
16287	// spatial image filtering functions. To use this setting, you must also enable
16288	// Noise reducer (NoiseReducer). * Bilateral preserves edges while reducing
16289	// noise. * Mean (softest), Gaussian, Lanczos, and Sharpen (sharpest) do convolution
16290	// filtering. * Conserve does min/max noise reduction. * Spatial does frequency-domain
16291	// filtering based on JND principles. * Temporal optimizes video quality for
16292	// complex motion.
16293	Filter *string `locationName:"filter" type:"string" enum:"NoiseReducerFilter"`
16294
16295	// Settings for a noise reducer filter
16296	FilterSettings *NoiseReducerFilterSettings `locationName:"filterSettings" type:"structure"`
16297
16298	// Noise reducer filter settings for spatial filter.
16299	SpatialFilterSettings *NoiseReducerSpatialFilterSettings `locationName:"spatialFilterSettings" type:"structure"`
16300
16301	// Noise reducer filter settings for temporal filter.
16302	TemporalFilterSettings *NoiseReducerTemporalFilterSettings `locationName:"temporalFilterSettings" type:"structure"`
16303}
16304
16305// String returns the string representation
16306func (s NoiseReducer) String() string {
16307	return awsutil.Prettify(s)
16308}
16309
16310// GoString returns the string representation
16311func (s NoiseReducer) GoString() string {
16312	return s.String()
16313}
16314
16315// Validate inspects the fields of the type to determine if they are valid.
16316func (s *NoiseReducer) Validate() error {
16317	invalidParams := request.ErrInvalidParams{Context: "NoiseReducer"}
16318	if s.SpatialFilterSettings != nil {
16319		if err := s.SpatialFilterSettings.Validate(); err != nil {
16320			invalidParams.AddNested("SpatialFilterSettings", err.(request.ErrInvalidParams))
16321		}
16322	}
16323	if s.TemporalFilterSettings != nil {
16324		if err := s.TemporalFilterSettings.Validate(); err != nil {
16325			invalidParams.AddNested("TemporalFilterSettings", err.(request.ErrInvalidParams))
16326		}
16327	}
16328
16329	if invalidParams.Len() > 0 {
16330		return invalidParams
16331	}
16332	return nil
16333}
16334
16335// SetFilter sets the Filter field's value.
16336func (s *NoiseReducer) SetFilter(v string) *NoiseReducer {
16337	s.Filter = &v
16338	return s
16339}
16340
16341// SetFilterSettings sets the FilterSettings field's value.
16342func (s *NoiseReducer) SetFilterSettings(v *NoiseReducerFilterSettings) *NoiseReducer {
16343	s.FilterSettings = v
16344	return s
16345}
16346
16347// SetSpatialFilterSettings sets the SpatialFilterSettings field's value.
16348func (s *NoiseReducer) SetSpatialFilterSettings(v *NoiseReducerSpatialFilterSettings) *NoiseReducer {
16349	s.SpatialFilterSettings = v
16350	return s
16351}
16352
16353// SetTemporalFilterSettings sets the TemporalFilterSettings field's value.
16354func (s *NoiseReducer) SetTemporalFilterSettings(v *NoiseReducerTemporalFilterSettings) *NoiseReducer {
16355	s.TemporalFilterSettings = v
16356	return s
16357}
16358
16359// Settings for a noise reducer filter
16360type NoiseReducerFilterSettings struct {
16361	_ struct{} `type:"structure"`
16362
16363	// Relative strength of noise reducing filter. Higher values produce stronger
16364	// filtering.
16365	Strength *int64 `locationName:"strength" type:"integer"`
16366}
16367
16368// String returns the string representation
16369func (s NoiseReducerFilterSettings) String() string {
16370	return awsutil.Prettify(s)
16371}
16372
16373// GoString returns the string representation
16374func (s NoiseReducerFilterSettings) GoString() string {
16375	return s.String()
16376}
16377
16378// SetStrength sets the Strength field's value.
16379func (s *NoiseReducerFilterSettings) SetStrength(v int64) *NoiseReducerFilterSettings {
16380	s.Strength = &v
16381	return s
16382}
16383
16384// Noise reducer filter settings for spatial filter.
16385type NoiseReducerSpatialFilterSettings struct {
16386	_ struct{} `type:"structure"`
16387
16388	// Specify strength of post noise reduction sharpening filter, with 0 disabling
16389	// the filter and 3 enabling it at maximum strength.
16390	PostFilterSharpenStrength *int64 `locationName:"postFilterSharpenStrength" type:"integer"`
16391
16392	// The speed of the filter, from -2 (lower speed) to 3 (higher speed), with
16393	// 0 being the nominal value.
16394	Speed *int64 `locationName:"speed" type:"integer"`
16395
16396	// Relative strength of noise reducing filter. Higher values produce stronger
16397	// filtering.
16398	Strength *int64 `locationName:"strength" type:"integer"`
16399}
16400
16401// String returns the string representation
16402func (s NoiseReducerSpatialFilterSettings) String() string {
16403	return awsutil.Prettify(s)
16404}
16405
16406// GoString returns the string representation
16407func (s NoiseReducerSpatialFilterSettings) GoString() string {
16408	return s.String()
16409}
16410
16411// Validate inspects the fields of the type to determine if they are valid.
16412func (s *NoiseReducerSpatialFilterSettings) Validate() error {
16413	invalidParams := request.ErrInvalidParams{Context: "NoiseReducerSpatialFilterSettings"}
16414	if s.Speed != nil && *s.Speed < -2 {
16415		invalidParams.Add(request.NewErrParamMinValue("Speed", -2))
16416	}
16417
16418	if invalidParams.Len() > 0 {
16419		return invalidParams
16420	}
16421	return nil
16422}
16423
16424// SetPostFilterSharpenStrength sets the PostFilterSharpenStrength field's value.
16425func (s *NoiseReducerSpatialFilterSettings) SetPostFilterSharpenStrength(v int64) *NoiseReducerSpatialFilterSettings {
16426	s.PostFilterSharpenStrength = &v
16427	return s
16428}
16429
16430// SetSpeed sets the Speed field's value.
16431func (s *NoiseReducerSpatialFilterSettings) SetSpeed(v int64) *NoiseReducerSpatialFilterSettings {
16432	s.Speed = &v
16433	return s
16434}
16435
16436// SetStrength sets the Strength field's value.
16437func (s *NoiseReducerSpatialFilterSettings) SetStrength(v int64) *NoiseReducerSpatialFilterSettings {
16438	s.Strength = &v
16439	return s
16440}
16441
16442// Noise reducer filter settings for temporal filter.
16443type NoiseReducerTemporalFilterSettings struct {
16444	_ struct{} `type:"structure"`
16445
16446	// Use Aggressive mode for content that has complex motion. Higher values produce
16447	// stronger temporal filtering. This filters highly complex scenes more aggressively
16448	// and creates better VQ for low bitrate outputs.
16449	AggressiveMode *int64 `locationName:"aggressiveMode" type:"integer"`
16450
16451	// Optional. When you set Noise reducer (noiseReducer) to Temporal (TEMPORAL),
16452	// you can use this setting to apply sharpening. The default behavior, Auto
16453	// (AUTO), allows the transcoder to determine whether to apply filtering, depending
16454	// on input type and quality. When you set Noise reducer to Temporal, your output
16455	// bandwidth is reduced. When Post temporal sharpening is also enabled, that
16456	// bandwidth reduction is smaller.
16457	PostTemporalSharpening *string `locationName:"postTemporalSharpening" type:"string" enum:"NoiseFilterPostTemporalSharpening"`
16458
16459	// The speed of the filter (a higher number is faster). A low setting reduces
16460	// bit rate at the cost of transcode time; a high setting improves transcode
16461	// time at the cost of bit rate.
16462	Speed *int64 `locationName:"speed" type:"integer"`
16463
16464	// Specify the strength of the noise reducing filter on this output. Higher
16465	// values produce stronger filtering. We recommend the following value ranges,
16466	// depending on the result that you want: * 0-2 for complexity reduction with
16467	// minimal sharpness loss * 2-8 for complexity reduction with image preservation
16468	// * 8-16 for a high level of complexity reduction
16469	Strength *int64 `locationName:"strength" type:"integer"`
16470}
16471
16472// String returns the string representation
16473func (s NoiseReducerTemporalFilterSettings) String() string {
16474	return awsutil.Prettify(s)
16475}
16476
16477// GoString returns the string representation
16478func (s NoiseReducerTemporalFilterSettings) GoString() string {
16479	return s.String()
16480}
16481
16482// Validate inspects the fields of the type to determine if they are valid.
16483func (s *NoiseReducerTemporalFilterSettings) Validate() error {
16484	invalidParams := request.ErrInvalidParams{Context: "NoiseReducerTemporalFilterSettings"}
16485	if s.Speed != nil && *s.Speed < -1 {
16486		invalidParams.Add(request.NewErrParamMinValue("Speed", -1))
16487	}
16488
16489	if invalidParams.Len() > 0 {
16490		return invalidParams
16491	}
16492	return nil
16493}
16494
16495// SetAggressiveMode sets the AggressiveMode field's value.
16496func (s *NoiseReducerTemporalFilterSettings) SetAggressiveMode(v int64) *NoiseReducerTemporalFilterSettings {
16497	s.AggressiveMode = &v
16498	return s
16499}
16500
16501// SetPostTemporalSharpening sets the PostTemporalSharpening field's value.
16502func (s *NoiseReducerTemporalFilterSettings) SetPostTemporalSharpening(v string) *NoiseReducerTemporalFilterSettings {
16503	s.PostTemporalSharpening = &v
16504	return s
16505}
16506
16507// SetSpeed sets the Speed field's value.
16508func (s *NoiseReducerTemporalFilterSettings) SetSpeed(v int64) *NoiseReducerTemporalFilterSettings {
16509	s.Speed = &v
16510	return s
16511}
16512
16513// SetStrength sets the Strength field's value.
16514func (s *NoiseReducerTemporalFilterSettings) SetStrength(v int64) *NoiseReducerTemporalFilterSettings {
16515	s.Strength = &v
16516	return s
16517}
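
// Example (illustrative, not part of the generated API documentation): a temporal
// noise reducer sketch. The strength and speed values below are placeholders chosen
// from the ranges that the documentation above recommends for complexity reduction
// with image preservation.
//
//    noiseReducer := (&mediaconvert.NoiseReducer{}).
//        SetFilter("TEMPORAL").
//        SetTemporalFilterSettings((&mediaconvert.NoiseReducerTemporalFilterSettings{}).
//            SetStrength(4).
//            SetSpeed(1).
//            SetPostTemporalSharpening("AUTO"))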
16518
16519type NotFoundException struct {
16520	_            struct{}                  `type:"structure"`
16521	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
16522
16523	Message_ *string `locationName:"message" type:"string"`
16524}
16525
16526// String returns the string representation
16527func (s NotFoundException) String() string {
16528	return awsutil.Prettify(s)
16529}
16530
16531// GoString returns the string representation
16532func (s NotFoundException) GoString() string {
16533	return s.String()
16534}
16535
16536func newErrorNotFoundException(v protocol.ResponseMetadata) error {
16537	return &NotFoundException{
16538		RespMetadata: v,
16539	}
16540}
16541
16542// Code returns the exception type name.
16543func (s *NotFoundException) Code() string {
16544	return "NotFoundException"
16545}
16546
16547// Message returns the exception's message.
16548func (s *NotFoundException) Message() string {
16549	if s.Message_ != nil {
16550		return *s.Message_
16551	}
16552	return ""
16553}
16554
16555// OrigErr always returns nil, satisfies awserr.Error interface.
16556func (s *NotFoundException) OrigErr() error {
16557	return nil
16558}
16559
16560func (s *NotFoundException) Error() string {
16561	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
16562}
16563
16564// StatusCode returns the HTTP status code for the request's response error.
16565func (s *NotFoundException) StatusCode() int {
16566	return s.RespMetadata.StatusCode
16567}
16568
16569// RequestID returns the service's response RequestID for the request.
16570func (s *NotFoundException) RequestID() string {
16571	return s.RespMetadata.RequestID
16572}
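
// Example (illustrative, not part of the generated API documentation): detecting a
// NotFoundException returned by an API call. The job ID below is a placeholder.
//
//    _, err := client.GetJob(&mediaconvert.GetJobInput{Id: aws.String("example-job-id")})
//    if err != nil {
//        var notFound *mediaconvert.NotFoundException
//        if errors.As(err, &notFound) {
//            fmt.Println("job not found:", notFound.Message())
//        }
//    }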
16573
16574// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
16575// value OPUS.
16576type OpusSettings struct {
16577	_ struct{} `type:"structure"`
16578
16579	// Optional. Specify the average bitrate in bits per second. Valid values are
16580	// multiples of 8000, from 32000 through 192000. The default value is 96000,
16581	// which we recommend for quality and bandwidth.
16582	Bitrate *int64 `locationName:"bitrate" min:"32000" type:"integer"`
16583
16584	// Specify the number of channels in this output audio track. Choosing Mono
16585	// on the console gives you 1 output channel; choosing Stereo gives you 2. In
16586	// the API, valid values are 1 and 2.
16587	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
16588
16589	// Optional. Sample rate in hz. Valid values are 16000, 24000, and 48000. The
16590	// default value is 48000.
16591	SampleRate *int64 `locationName:"sampleRate" min:"16000" type:"integer"`
16592}
16593
16594// String returns the string representation
16595func (s OpusSettings) String() string {
16596	return awsutil.Prettify(s)
16597}
16598
16599// GoString returns the string representation
16600func (s OpusSettings) GoString() string {
16601	return s.String()
16602}
16603
16604// Validate inspects the fields of the type to determine if they are valid.
16605func (s *OpusSettings) Validate() error {
16606	invalidParams := request.ErrInvalidParams{Context: "OpusSettings"}
16607	if s.Bitrate != nil && *s.Bitrate < 32000 {
16608		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 32000))
16609	}
16610	if s.Channels != nil && *s.Channels < 1 {
16611		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
16612	}
16613	if s.SampleRate != nil && *s.SampleRate < 16000 {
16614		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 16000))
16615	}
16616
16617	if invalidParams.Len() > 0 {
16618		return invalidParams
16619	}
16620	return nil
16621}
16622
16623// SetBitrate sets the Bitrate field's value.
16624func (s *OpusSettings) SetBitrate(v int64) *OpusSettings {
16625	s.Bitrate = &v
16626	return s
16627}
16628
16629// SetChannels sets the Channels field's value.
16630func (s *OpusSettings) SetChannels(v int64) *OpusSettings {
16631	s.Channels = &v
16632	return s
16633}
16634
16635// SetSampleRate sets the SampleRate field's value.
16636func (s *OpusSettings) SetSampleRate(v int64) *OpusSettings {
16637	s.SampleRate = &v
16638	return s
16639}
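
// Example (illustrative, not part of the generated API documentation): stereo Opus
// audio using the default bitrate and sample rate described above.
//
//    opus := (&mediaconvert.OpusSettings{}).
//        SetBitrate(96000).
//        SetChannels(2).
//        SetSampleRate(48000)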
16640
16641// An output object describes the settings for a single output file or stream
16642// in an output group.
16643type Output struct {
16644	_ struct{} `type:"structure"`
16645
16646	// (AudioDescriptions) contains groups of audio encoding settings organized
16647	// by audio codec. Include one instance of (AudioDescriptions) per output. (AudioDescriptions)
16648	// can contain multiple groups of encoding settings.
16649	AudioDescriptions []*AudioDescription `locationName:"audioDescriptions" type:"list"`
16650
16651	// (CaptionDescriptions) contains groups of captions settings. For each output
16652	// that has captions, include one instance of (CaptionDescriptions). (CaptionDescriptions)
16653	// can contain multiple groups of captions settings.
16654	CaptionDescriptions []*CaptionDescription `locationName:"captionDescriptions" type:"list"`
16655
16656	// Container specific settings.
16657	ContainerSettings *ContainerSettings `locationName:"containerSettings" type:"structure"`
16658
16659	// Use Extension (Extension) to specify the file extension for outputs in File
16660	// output groups. If you do not specify a value, the service will use default
16661	// extensions by container type as follows: * MPEG-2 transport stream, m2ts *
16662	// Quicktime, mov * MXF container, mxf * MPEG-4 container, mp4 * WebM container,
16663	// webm * No Container, the service will use codec extensions (e.g. AAC, H265,
16664	// AC3).
16665	Extension *string `locationName:"extension" type:"string"`
16666
16667	// Use Name modifier (NameModifier) to have the service add a string to the
16668	// end of each output filename. You specify the base filename as part of your
16669	// destination URI. When you create multiple outputs in the same output group,
16670	// Name modifier (NameModifier) is required. Name modifier also accepts format
16671	// identifiers. For DASH ISO outputs, if you use the format identifiers $Number$
16672	// or $Time$ in one output, you must use them in the same way in all outputs
16673	// of the output group.
16674	NameModifier *string `locationName:"nameModifier" min:"1" type:"string"`
16675
16676	// Specific settings for this type of output.
16677	OutputSettings *OutputSettings `locationName:"outputSettings" type:"structure"`
16678
16679	// Use Preset (Preset) to specify a preset for your transcoding settings. Provide
16680	// the system or custom preset name. You can specify either Preset (Preset)
16681	// or Container settings (ContainerSettings), but not both.
16682	Preset *string `locationName:"preset" type:"string"`
16683
16684	// (VideoDescription) contains a group of video encoding settings. The specific
16685	// video settings depend on the video codec that you choose when you specify
16686	// a value for Video codec (codec). Include one instance of (VideoDescription)
16687	// per output.
16688	VideoDescription *VideoDescription `locationName:"videoDescription" type:"structure"`
16689}
16690
16691// String returns the string representation
16692func (s Output) String() string {
16693	return awsutil.Prettify(s)
16694}
16695
16696// GoString returns the string representation
16697func (s Output) GoString() string {
16698	return s.String()
16699}
16700
16701// Validate inspects the fields of the type to determine if they are valid.
16702func (s *Output) Validate() error {
16703	invalidParams := request.ErrInvalidParams{Context: "Output"}
16704	if s.NameModifier != nil && len(*s.NameModifier) < 1 {
16705		invalidParams.Add(request.NewErrParamMinLen("NameModifier", 1))
16706	}
16707	if s.AudioDescriptions != nil {
16708		for i, v := range s.AudioDescriptions {
16709			if v == nil {
16710				continue
16711			}
16712			if err := v.Validate(); err != nil {
16713				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AudioDescriptions", i), err.(request.ErrInvalidParams))
16714			}
16715		}
16716	}
16717	if s.CaptionDescriptions != nil {
16718		for i, v := range s.CaptionDescriptions {
16719			if v == nil {
16720				continue
16721			}
16722			if err := v.Validate(); err != nil {
16723				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionDescriptions", i), err.(request.ErrInvalidParams))
16724			}
16725		}
16726	}
16727	if s.ContainerSettings != nil {
16728		if err := s.ContainerSettings.Validate(); err != nil {
16729			invalidParams.AddNested("ContainerSettings", err.(request.ErrInvalidParams))
16730		}
16731	}
16732	if s.VideoDescription != nil {
16733		if err := s.VideoDescription.Validate(); err != nil {
16734			invalidParams.AddNested("VideoDescription", err.(request.ErrInvalidParams))
16735		}
16736	}
16737
16738	if invalidParams.Len() > 0 {
16739		return invalidParams
16740	}
16741	return nil
16742}
16743
16744// SetAudioDescriptions sets the AudioDescriptions field's value.
16745func (s *Output) SetAudioDescriptions(v []*AudioDescription) *Output {
16746	s.AudioDescriptions = v
16747	return s
16748}
16749
16750// SetCaptionDescriptions sets the CaptionDescriptions field's value.
16751func (s *Output) SetCaptionDescriptions(v []*CaptionDescription) *Output {
16752	s.CaptionDescriptions = v
16753	return s
16754}
16755
16756// SetContainerSettings sets the ContainerSettings field's value.
16757func (s *Output) SetContainerSettings(v *ContainerSettings) *Output {
16758	s.ContainerSettings = v
16759	return s
16760}
16761
16762// SetExtension sets the Extension field's value.
16763func (s *Output) SetExtension(v string) *Output {
16764	s.Extension = &v
16765	return s
16766}
16767
16768// SetNameModifier sets the NameModifier field's value.
16769func (s *Output) SetNameModifier(v string) *Output {
16770	s.NameModifier = &v
16771	return s
16772}
16773
16774// SetOutputSettings sets the OutputSettings field's value.
16775func (s *Output) SetOutputSettings(v *OutputSettings) *Output {
16776	s.OutputSettings = v
16777	return s
16778}
16779
16780// SetPreset sets the Preset field's value.
16781func (s *Output) SetPreset(v string) *Output {
16782	s.Preset = &v
16783	return s
16784}
16785
16786// SetVideoDescription sets the VideoDescription field's value.
16787func (s *Output) SetVideoDescription(v *VideoDescription) *Output {
16788	s.VideoDescription = v
16789	return s
16790}
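
// Example (illustrative, not part of the generated API documentation): an output
// that applies a preset and appends a name modifier to the base filename from the
// destination URI. The preset name below is a placeholder; remember that Preset and
// ContainerSettings are mutually exclusive.
//
//    output := (&mediaconvert.Output{}).
//        SetPreset("example-1080p-preset").
//        SetNameModifier("-1080p")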
16791
16792// OutputChannel mapping settings.
16793type OutputChannelMapping struct {
16794	_ struct{} `type:"structure"`
16795
16796	// Use this setting to specify your remix values when they are integers, such
16797	// as -10, 0, or 4.
16798	InputChannels []*int64 `locationName:"inputChannels" type:"list"`
16799
16800	// Use this setting to specify your remix values when they have a decimal component,
16801	// such as -10.312, 0.08, or 4.9. MediaConvert rounds your remixing values to
16802	// the nearest thousandth.
16803	InputChannelsFineTune []*float64 `locationName:"inputChannelsFineTune" type:"list"`
16804}
16805
16806// String returns the string representation
16807func (s OutputChannelMapping) String() string {
16808	return awsutil.Prettify(s)
16809}
16810
16811// GoString returns the string representation
16812func (s OutputChannelMapping) GoString() string {
16813	return s.String()
16814}
16815
16816// SetInputChannels sets the InputChannels field's value.
16817func (s *OutputChannelMapping) SetInputChannels(v []*int64) *OutputChannelMapping {
16818	s.InputChannels = v
16819	return s
16820}
16821
16822// SetInputChannelsFineTune sets the InputChannelsFineTune field's value.
16823func (s *OutputChannelMapping) SetInputChannelsFineTune(v []*float64) *OutputChannelMapping {
16824	s.InputChannelsFineTune = v
16825	return s
16826}
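
// Example (illustrative, not part of the generated API documentation): two remix
// gain sketches for one output channel, one using whole-number values (InputChannels)
// and one using values with a decimal component (InputChannelsFineTune). Provide the
// list that matches the precision of your gain values.
//
//    wholeDb := (&mediaconvert.OutputChannelMapping{}).
//        SetInputChannels([]*int64{aws.Int64(0), aws.Int64(-60)})
//    fineTuneDb := (&mediaconvert.OutputChannelMapping{}).
//        SetInputChannelsFineTune([]*float64{aws.Float64(-1.5), aws.Float64(-60)})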
16827
16828// Details regarding output
16829type OutputDetail struct {
16830	_ struct{} `type:"structure"`
16831
16832	// Duration in milliseconds
16833	DurationInMs *int64 `locationName:"durationInMs" type:"integer"`
16834
16835	// Contains details about the output's video stream
16836	VideoDetails *VideoDetail `locationName:"videoDetails" type:"structure"`
16837}
16838
16839// String returns the string representation
16840func (s OutputDetail) String() string {
16841	return awsutil.Prettify(s)
16842}
16843
16844// GoString returns the string representation
16845func (s OutputDetail) GoString() string {
16846	return s.String()
16847}
16848
16849// SetDurationInMs sets the DurationInMs field's value.
16850func (s *OutputDetail) SetDurationInMs(v int64) *OutputDetail {
16851	s.DurationInMs = &v
16852	return s
16853}
16854
16855// SetVideoDetails sets the VideoDetails field's value.
16856func (s *OutputDetail) SetVideoDetails(v *VideoDetail) *OutputDetail {
16857	s.VideoDetails = v
16858	return s
16859}
16860
16861// Group of outputs
16862type OutputGroup struct {
16863	_ struct{} `type:"structure"`
16864
16865	// Use automated encoding to have MediaConvert choose your encoding settings
16866	// for you, based on characteristics of your input video.
16867	AutomatedEncodingSettings *AutomatedEncodingSettings `locationName:"automatedEncodingSettings" type:"structure"`
16868
16869	// Use Custom Group Name (CustomName) to specify a name for the output group.
16870	// This value is displayed on the console and can make your job settings JSON
16871	// more human-readable. It does not affect your outputs. Use up to twelve characters
16872	// that are either letters, numbers, spaces, or underscores.
16873	CustomName *string `locationName:"customName" type:"string"`
16874
16875	// Name of the output group
16876	Name *string `locationName:"name" type:"string"`
16877
16878	// Output Group settings, including type
16879	OutputGroupSettings *OutputGroupSettings `locationName:"outputGroupSettings" type:"structure"`
16880
16881	// This object holds groups of encoding settings, one group of settings per
16882	// output.
16883	Outputs []*Output `locationName:"outputs" type:"list"`
16884}
16885
16886// String returns the string representation
16887func (s OutputGroup) String() string {
16888	return awsutil.Prettify(s)
16889}
16890
16891// GoString returns the string representation
16892func (s OutputGroup) GoString() string {
16893	return s.String()
16894}
16895
16896// Validate inspects the fields of the type to determine if they are valid.
16897func (s *OutputGroup) Validate() error {
16898	invalidParams := request.ErrInvalidParams{Context: "OutputGroup"}
16899	if s.AutomatedEncodingSettings != nil {
16900		if err := s.AutomatedEncodingSettings.Validate(); err != nil {
16901			invalidParams.AddNested("AutomatedEncodingSettings", err.(request.ErrInvalidParams))
16902		}
16903	}
16904	if s.OutputGroupSettings != nil {
16905		if err := s.OutputGroupSettings.Validate(); err != nil {
16906			invalidParams.AddNested("OutputGroupSettings", err.(request.ErrInvalidParams))
16907		}
16908	}
16909	if s.Outputs != nil {
16910		for i, v := range s.Outputs {
16911			if v == nil {
16912				continue
16913			}
16914			if err := v.Validate(); err != nil {
16915				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Outputs", i), err.(request.ErrInvalidParams))
16916			}
16917		}
16918	}
16919
16920	if invalidParams.Len() > 0 {
16921		return invalidParams
16922	}
16923	return nil
16924}
16925
16926// SetAutomatedEncodingSettings sets the AutomatedEncodingSettings field's value.
16927func (s *OutputGroup) SetAutomatedEncodingSettings(v *AutomatedEncodingSettings) *OutputGroup {
16928	s.AutomatedEncodingSettings = v
16929	return s
16930}
16931
16932// SetCustomName sets the CustomName field's value.
16933func (s *OutputGroup) SetCustomName(v string) *OutputGroup {
16934	s.CustomName = &v
16935	return s
16936}
16937
16938// SetName sets the Name field's value.
16939func (s *OutputGroup) SetName(v string) *OutputGroup {
16940	s.Name = &v
16941	return s
16942}
16943
16944// SetOutputGroupSettings sets the OutputGroupSettings field's value.
16945func (s *OutputGroup) SetOutputGroupSettings(v *OutputGroupSettings) *OutputGroup {
16946	s.OutputGroupSettings = v
16947	return s
16948}
16949
16950// SetOutputs sets the Outputs field's value.
16951func (s *OutputGroup) SetOutputs(v []*Output) *OutputGroup {
16952	s.Outputs = v
16953	return s
16954}
16955
16956// Contains details about the output groups specified in the job settings.
16957type OutputGroupDetail struct {
16958	_ struct{} `type:"structure"`
16959
16960	// Details about the output
16961	OutputDetails []*OutputDetail `locationName:"outputDetails" type:"list"`
16962}
16963
16964// String returns the string representation
16965func (s OutputGroupDetail) String() string {
16966	return awsutil.Prettify(s)
16967}
16968
16969// GoString returns the string representation
16970func (s OutputGroupDetail) GoString() string {
16971	return s.String()
16972}
16973
16974// SetOutputDetails sets the OutputDetails field's value.
16975func (s *OutputGroupDetail) SetOutputDetails(v []*OutputDetail) *OutputGroupDetail {
16976	s.OutputDetails = v
16977	return s
16978}
16979
16980// Output Group settings, including type
16981type OutputGroupSettings struct {
16982	_ struct{} `type:"structure"`
16983
16984	// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
16985	// CMAF_GROUP_SETTINGS. Each output in a CMAF Output Group may only contain
16986	// a single video, audio, or caption output.
16987	CmafGroupSettings *CmafGroupSettings `locationName:"cmafGroupSettings" type:"structure"`
16988
16989	// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
16990	// DASH_ISO_GROUP_SETTINGS.
16991	DashIsoGroupSettings *DashIsoGroupSettings `locationName:"dashIsoGroupSettings" type:"structure"`
16992
16993	// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
16994	// FILE_GROUP_SETTINGS.
16995	FileGroupSettings *FileGroupSettings `locationName:"fileGroupSettings" type:"structure"`
16996
16997	// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
16998	// HLS_GROUP_SETTINGS.
16999	HlsGroupSettings *HlsGroupSettings `locationName:"hlsGroupSettings" type:"structure"`
17000
17001	// Required when you set (Type) under (OutputGroups)>(OutputGroupSettings) to
17002	// MS_SMOOTH_GROUP_SETTINGS.
17003	MsSmoothGroupSettings *MsSmoothGroupSettings `locationName:"msSmoothGroupSettings" type:"structure"`
17004
17005	// Type of output group (File group, Apple HLS, DASH ISO, Microsoft Smooth Streaming,
17006	// CMAF)
17007	Type *string `locationName:"type" type:"string" enum:"OutputGroupType"`
17008}
17009
17010// String returns the string representation
17011func (s OutputGroupSettings) String() string {
17012	return awsutil.Prettify(s)
17013}
17014
17015// GoString returns the string representation
17016func (s OutputGroupSettings) GoString() string {
17017	return s.String()
17018}
17019
17020// Validate inspects the fields of the type to determine if they are valid.
17021func (s *OutputGroupSettings) Validate() error {
17022	invalidParams := request.ErrInvalidParams{Context: "OutputGroupSettings"}
17023	if s.CmafGroupSettings != nil {
17024		if err := s.CmafGroupSettings.Validate(); err != nil {
17025			invalidParams.AddNested("CmafGroupSettings", err.(request.ErrInvalidParams))
17026		}
17027	}
17028	if s.DashIsoGroupSettings != nil {
17029		if err := s.DashIsoGroupSettings.Validate(); err != nil {
17030			invalidParams.AddNested("DashIsoGroupSettings", err.(request.ErrInvalidParams))
17031		}
17032	}
17033	if s.HlsGroupSettings != nil {
17034		if err := s.HlsGroupSettings.Validate(); err != nil {
17035			invalidParams.AddNested("HlsGroupSettings", err.(request.ErrInvalidParams))
17036		}
17037	}
17038	if s.MsSmoothGroupSettings != nil {
17039		if err := s.MsSmoothGroupSettings.Validate(); err != nil {
17040			invalidParams.AddNested("MsSmoothGroupSettings", err.(request.ErrInvalidParams))
17041		}
17042	}
17043
17044	if invalidParams.Len() > 0 {
17045		return invalidParams
17046	}
17047	return nil
17048}
17049
17050// SetCmafGroupSettings sets the CmafGroupSettings field's value.
17051func (s *OutputGroupSettings) SetCmafGroupSettings(v *CmafGroupSettings) *OutputGroupSettings {
17052	s.CmafGroupSettings = v
17053	return s
17054}
17055
17056// SetDashIsoGroupSettings sets the DashIsoGroupSettings field's value.
17057func (s *OutputGroupSettings) SetDashIsoGroupSettings(v *DashIsoGroupSettings) *OutputGroupSettings {
17058	s.DashIsoGroupSettings = v
17059	return s
17060}
17061
17062// SetFileGroupSettings sets the FileGroupSettings field's value.
17063func (s *OutputGroupSettings) SetFileGroupSettings(v *FileGroupSettings) *OutputGroupSettings {
17064	s.FileGroupSettings = v
17065	return s
17066}
17067
17068// SetHlsGroupSettings sets the HlsGroupSettings field's value.
17069func (s *OutputGroupSettings) SetHlsGroupSettings(v *HlsGroupSettings) *OutputGroupSettings {
17070	s.HlsGroupSettings = v
17071	return s
17072}
17073
17074// SetMsSmoothGroupSettings sets the MsSmoothGroupSettings field's value.
17075func (s *OutputGroupSettings) SetMsSmoothGroupSettings(v *MsSmoothGroupSettings) *OutputGroupSettings {
17076	s.MsSmoothGroupSettings = v
17077	return s
17078}
17079
17080// SetType sets the Type field's value.
17081func (s *OutputGroupSettings) SetType(v string) *OutputGroupSettings {
17082	s.Type = &v
17083	return s
17084}
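
// Example (illustrative, not part of the generated API documentation): an output
// group that uses the Microsoft Smooth Streaming settings type. The group name is a
// placeholder, and msSmooth and output refer to values like the sketches shown
// earlier in this file.
//
//    group := (&mediaconvert.OutputGroup{}).
//        SetName("Smooth Streaming").
//        SetOutputGroupSettings((&mediaconvert.OutputGroupSettings{}).
//            SetType("MS_SMOOTH_GROUP_SETTINGS").
//            SetMsSmoothGroupSettings(msSmooth)).
//        SetOutputs([]*mediaconvert.Output{output})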
17085
17086// Specific settings for this type of output.
17087type OutputSettings struct {
17088	_ struct{} `type:"structure"`
17089
17090	// Settings for HLS output groups
17091	HlsSettings *HlsSettings `locationName:"hlsSettings" type:"structure"`
17092}
17093
17094// String returns the string representation
17095func (s OutputSettings) String() string {
17096	return awsutil.Prettify(s)
17097}
17098
17099// GoString returns the string representation
17100func (s OutputSettings) GoString() string {
17101	return s.String()
17102}
17103
17104// SetHlsSettings sets the HlsSettings field's value.
17105func (s *OutputSettings) SetHlsSettings(v *HlsSettings) *OutputSettings {
17106	s.HlsSettings = v
17107	return s
17108}
17109
17110// If you work with a third party video watermarking partner, use the group
17111// of settings that correspond with your watermarking partner to include watermarks
17112// in your output.
17113type PartnerWatermarking struct {
17114	_ struct{} `type:"structure"`
17115
17116	// For forensic video watermarking, MediaConvert supports Nagra NexGuard File
17117	// Marker watermarking. MediaConvert supports both PreRelease Content (NGPR/G2)
17118	// and OTT Streaming workflows.
17119	NexguardFileMarkerSettings *NexGuardFileMarkerSettings `locationName:"nexguardFileMarkerSettings" type:"structure"`
17120}
17121
17122// String returns the string representation
17123func (s PartnerWatermarking) String() string {
17124	return awsutil.Prettify(s)
17125}
17126
17127// GoString returns the string representation
17128func (s PartnerWatermarking) GoString() string {
17129	return s.String()
17130}
17131
17132// Validate inspects the fields of the type to determine if they are valid.
17133func (s *PartnerWatermarking) Validate() error {
17134	invalidParams := request.ErrInvalidParams{Context: "PartnerWatermarking"}
17135	if s.NexguardFileMarkerSettings != nil {
17136		if err := s.NexguardFileMarkerSettings.Validate(); err != nil {
17137			invalidParams.AddNested("NexguardFileMarkerSettings", err.(request.ErrInvalidParams))
17138		}
17139	}
17140
17141	if invalidParams.Len() > 0 {
17142		return invalidParams
17143	}
17144	return nil
17145}
17146
17147// SetNexguardFileMarkerSettings sets the NexguardFileMarkerSettings field's value.
17148func (s *PartnerWatermarking) SetNexguardFileMarkerSettings(v *NexGuardFileMarkerSettings) *PartnerWatermarking {
17149	s.NexguardFileMarkerSettings = v
17150	return s
17151}
17152
17153// A preset is a collection of preconfigured media conversion settings that
17154// you want MediaConvert to apply to the output during the conversion process.
17155type Preset struct {
17156	_ struct{} `type:"structure"`
17157
17158	// An identifier for this resource that is unique within all of AWS.
17159	Arn *string `locationName:"arn" type:"string"`
17160
17161	// An optional category you create to organize your presets.
17162	Category *string `locationName:"category" type:"string"`
17163
17164	// The timestamp in epoch seconds for preset creation.
17165	CreatedAt *time.Time `locationName:"createdAt" type:"timestamp" timestampFormat:"unixTimestamp"`
17166
17167	// An optional description you create for each preset.
17168	Description *string `locationName:"description" type:"string"`
17169
17170	// The timestamp in epoch seconds when the preset was last updated.
17171	LastUpdated *time.Time `locationName:"lastUpdated" type:"timestamp" timestampFormat:"unixTimestamp"`
17172
17173	// A name you create for each preset. Each name must be unique within your account.
17174	//
17175	// Name is a required field
17176	Name *string `locationName:"name" type:"string" required:"true"`
17177
17178	// Settings for preset
17179	//
17180	// Settings is a required field
17181	Settings *PresetSettings `locationName:"settings" type:"structure" required:"true"`
17182
	// A preset can be of two types: system or custom. System or built-in presets
	// can't be modified or deleted by the user.
17185	Type *string `locationName:"type" type:"string" enum:"Type"`
17186}
17187
17188// String returns the string representation
17189func (s Preset) String() string {
17190	return awsutil.Prettify(s)
17191}
17192
17193// GoString returns the string representation
17194func (s Preset) GoString() string {
17195	return s.String()
17196}
17197
17198// SetArn sets the Arn field's value.
17199func (s *Preset) SetArn(v string) *Preset {
17200	s.Arn = &v
17201	return s
17202}
17203
17204// SetCategory sets the Category field's value.
17205func (s *Preset) SetCategory(v string) *Preset {
17206	s.Category = &v
17207	return s
17208}
17209
17210// SetCreatedAt sets the CreatedAt field's value.
17211func (s *Preset) SetCreatedAt(v time.Time) *Preset {
17212	s.CreatedAt = &v
17213	return s
17214}
17215
17216// SetDescription sets the Description field's value.
17217func (s *Preset) SetDescription(v string) *Preset {
17218	s.Description = &v
17219	return s
17220}
17221
17222// SetLastUpdated sets the LastUpdated field's value.
17223func (s *Preset) SetLastUpdated(v time.Time) *Preset {
17224	s.LastUpdated = &v
17225	return s
17226}
17227
17228// SetName sets the Name field's value.
17229func (s *Preset) SetName(v string) *Preset {
17230	s.Name = &v
17231	return s
17232}
17233
17234// SetSettings sets the Settings field's value.
17235func (s *Preset) SetSettings(v *PresetSettings) *Preset {
17236	s.Settings = v
17237	return s
17238}
17239
17240// SetType sets the Type field's value.
17241func (s *Preset) SetType(v string) *Preset {
17242	s.Type = &v
17243	return s
17244}
17245
17246// Settings for preset
17247type PresetSettings struct {
17248	_ struct{} `type:"structure"`
17249
17250	// (AudioDescriptions) contains groups of audio encoding settings organized
17251	// by audio codec. Include one instance of (AudioDescriptions) per output. (AudioDescriptions)
17252	// can contain multiple groups of encoding settings.
17253	AudioDescriptions []*AudioDescription `locationName:"audioDescriptions" type:"list"`
17254
17255	// Caption settings for this preset. There can be multiple caption settings
17256	// in a single output.
17257	CaptionDescriptions []*CaptionDescriptionPreset `locationName:"captionDescriptions" type:"list"`
17258
17259	// Container specific settings.
17260	ContainerSettings *ContainerSettings `locationName:"containerSettings" type:"structure"`
17261
17262	// (VideoDescription) contains a group of video encoding settings. The specific
17263	// video settings depend on the video codec that you choose when you specify
17264	// a value for Video codec (codec). Include one instance of (VideoDescription)
17265	// per output.
17266	VideoDescription *VideoDescription `locationName:"videoDescription" type:"structure"`
17267}
17268
17269// String returns the string representation
17270func (s PresetSettings) String() string {
17271	return awsutil.Prettify(s)
17272}
17273
17274// GoString returns the string representation
17275func (s PresetSettings) GoString() string {
17276	return s.String()
17277}
17278
17279// Validate inspects the fields of the type to determine if they are valid.
17280func (s *PresetSettings) Validate() error {
17281	invalidParams := request.ErrInvalidParams{Context: "PresetSettings"}
17282	if s.AudioDescriptions != nil {
17283		for i, v := range s.AudioDescriptions {
17284			if v == nil {
17285				continue
17286			}
17287			if err := v.Validate(); err != nil {
17288				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "AudioDescriptions", i), err.(request.ErrInvalidParams))
17289			}
17290		}
17291	}
17292	if s.CaptionDescriptions != nil {
17293		for i, v := range s.CaptionDescriptions {
17294			if v == nil {
17295				continue
17296			}
17297			if err := v.Validate(); err != nil {
17298				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "CaptionDescriptions", i), err.(request.ErrInvalidParams))
17299			}
17300		}
17301	}
17302	if s.ContainerSettings != nil {
17303		if err := s.ContainerSettings.Validate(); err != nil {
17304			invalidParams.AddNested("ContainerSettings", err.(request.ErrInvalidParams))
17305		}
17306	}
17307	if s.VideoDescription != nil {
17308		if err := s.VideoDescription.Validate(); err != nil {
17309			invalidParams.AddNested("VideoDescription", err.(request.ErrInvalidParams))
17310		}
17311	}
17312
17313	if invalidParams.Len() > 0 {
17314		return invalidParams
17315	}
17316	return nil
17317}
17318
17319// SetAudioDescriptions sets the AudioDescriptions field's value.
17320func (s *PresetSettings) SetAudioDescriptions(v []*AudioDescription) *PresetSettings {
17321	s.AudioDescriptions = v
17322	return s
17323}
17324
17325// SetCaptionDescriptions sets the CaptionDescriptions field's value.
17326func (s *PresetSettings) SetCaptionDescriptions(v []*CaptionDescriptionPreset) *PresetSettings {
17327	s.CaptionDescriptions = v
17328	return s
17329}
17330
17331// SetContainerSettings sets the ContainerSettings field's value.
17332func (s *PresetSettings) SetContainerSettings(v *ContainerSettings) *PresetSettings {
17333	s.ContainerSettings = v
17334	return s
17335}
17336
17337// SetVideoDescription sets the VideoDescription field's value.
17338func (s *PresetSettings) SetVideoDescription(v *VideoDescription) *PresetSettings {
17339	s.VideoDescription = v
17340	return s
17341}
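
// Example: a minimal sketch of composing PresetSettings with the setters
// above, for example as the Settings value of a CreatePreset request. The
// empty AudioDescription, ContainerSettings, and VideoDescription values are
// placeholders; populate them with the codec and container fields that your
// workflow requires.
//
//    presetSettings := &mediaconvert.PresetSettings{}
//    presetSettings.SetAudioDescriptions([]*mediaconvert.AudioDescription{{}})
//    presetSettings.SetContainerSettings(&mediaconvert.ContainerSettings{})
//    presetSettings.SetVideoDescription(&mediaconvert.VideoDescription{})
//
//    if err := presetSettings.Validate(); err != nil {
//        fmt.Println("invalid preset settings:", err)
//    }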
17342
17343// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
17344// the value PRORES.
17345type ProresSettings struct {
17346	_ struct{} `type:"structure"`
17347
17348	// Use Profile (ProResCodecProfile) to specify the type of Apple ProRes codec
17349	// to use for this output.
17350	CodecProfile *string `locationName:"codecProfile" type:"string" enum:"ProresCodecProfile"`
17351
17352	// If you are using the console, use the Framerate setting to specify the frame
17353	// rate for this output. If you want to keep the same frame rate as the input
17354	// video, choose Follow source. If you want to do frame rate conversion, choose
17355	// a frame rate from the dropdown list or choose Custom. The framerates shown
17356	// in the dropdown list are decimal approximations of fractions. If you choose
17357	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
17358	// job specification as a JSON file without the console, use FramerateControl
17359	// to specify which value the service uses for the frame rate for this output.
17360	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
17361	// from the input. Choose SPECIFIED if you want the service to use the frame
17362	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
17363	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"ProresFramerateControl"`
17364
17365	// Choose the method that you want MediaConvert to use when increasing or decreasing
17366	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
17367	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
17368	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
17369	// smooth picture, but might introduce undesirable video artifacts. For complex
17370	// frame rate conversions, especially if your source video has already been
17371	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
17372	// motion-compensated interpolation. FrameFormer chooses the best conversion
17373	// method frame by frame. Note that using FrameFormer increases the transcoding
17374	// time and incurs a significant add-on cost.
17375	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"ProresFramerateConversionAlgorithm"`
17376
17377	// When you use the API for transcode jobs that use frame rate conversion, specify
17378	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
17379	// FramerateDenominator to specify the denominator of this fraction. In this
17380	// example, use 1001 for the value of FramerateDenominator. When you use the
17381	// console for transcode jobs that use frame rate conversion, provide the value
17382	// as a decimal number for Framerate. In this example, specify 23.976.
17383	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
17384
17385	// When you use the API for transcode jobs that use frame rate conversion, specify
17386	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
17387	// FramerateNumerator to specify the numerator of this fraction. In this example,
17388	// use 24000 for the value of FramerateNumerator. When you use the console for
17389	// transcode jobs that use frame rate conversion, provide the value as a decimal
17390	// number for Framerate. In this example, specify 23.976.
17391	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
17392
17393	// Choose the scan line type for the output. Keep the default value, Progressive
17394	// (PROGRESSIVE) to create a progressive output, regardless of the scan type
17395	// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
17396	// to create an output that's interlaced with the same field polarity throughout.
17397	// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
17398	// to produce outputs with the same field polarity as the source. For jobs that
17399	// have multiple inputs, the output field polarity might change over the course
17400	// of the output. Follow behavior depends on the input scan type. If the source
17401	// is interlaced, the output will be interlaced with the same polarity as the
	// source. If the source is progressive, the output will be interlaced with
	// top field or bottom field first, depending on which of the Follow options
	// you choose.
17405	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"ProresInterlaceMode"`
17406
17407	// Optional. Specify how the service determines the pixel aspect ratio (PAR)
17408	// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
17409	// uses the PAR from your input video for your output. To specify a different
17410	// PAR in the console, choose any value other than Follow source. To specify
17411	// a different PAR by editing the JSON job specification, choose SPECIFIED.
17412	// When you choose SPECIFIED for this setting, you must also specify values
17413	// for the parNumerator and parDenominator settings.
17414	ParControl *string `locationName:"parControl" type:"string" enum:"ProresParControl"`
17415
17416	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
17417	// console, this corresponds to any value other than Follow source. When you
17418	// specify an output pixel aspect ratio (PAR) that is different from your input
17419	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
17420	// widescreen, you would specify the ratio 40:33. In this example, the value
17421	// for parDenominator is 33.
17422	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
17423
17424	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
17425	// console, this corresponds to any value other than Follow source. When you
17426	// specify an output pixel aspect ratio (PAR) that is different from your input
17427	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
17428	// widescreen, you would specify the ratio 40:33. In this example, the value
17429	// for parNumerator is 40.
17430	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
17431
17432	// Use this setting for interlaced outputs, when your output frame rate is half
17433	// of your input frame rate. In this situation, choose Optimized interlacing
17434	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
17435	// case, each progressive frame from the input corresponds to an interlaced
17436	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
17437	// for all other output frame rates. With basic interlacing, MediaConvert performs
17438	// any frame rate conversion first and then interlaces the frames. When you
17439	// choose Optimized interlacing and you set your output frame rate to a value
17440	// that isn't suitable for optimized interlacing, MediaConvert automatically
17441	// falls back to basic interlacing. Required settings: To use optimized interlacing,
17442	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
17443	// use optimized interlacing for hard telecine outputs. You must also set Interlace
17444	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
17445	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"ProresScanTypeConversionMode"`
17446
17447	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
17448	// second (fps). Enable slow PAL to create a 25 fps output. When you enable
17449	// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
17450	// your audio to keep it synchronized with the video. Note that enabling this
17451	// setting will slightly reduce the duration of your video. Required settings:
17452	// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
17453	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
17454	// 1.
17455	SlowPal *string `locationName:"slowPal" type:"string" enum:"ProresSlowPal"`
17456
17457	// When you do frame rate conversion from 23.976 frames per second (fps) to
17458	// 29.97 fps, and your output scan type is interlaced, you can optionally enable
17459	// hard telecine (HARD) to create a smoother picture. When you keep the default
17460	// value, None (NONE), MediaConvert does a standard frame rate conversion to
17461	// 29.97 without doing anything with the field polarity to create a smoother
17462	// picture.
17463	Telecine *string `locationName:"telecine" type:"string" enum:"ProresTelecine"`
17464}
17465
17466// String returns the string representation
17467func (s ProresSettings) String() string {
17468	return awsutil.Prettify(s)
17469}
17470
17471// GoString returns the string representation
17472func (s ProresSettings) GoString() string {
17473	return s.String()
17474}
17475
17476// Validate inspects the fields of the type to determine if they are valid.
17477func (s *ProresSettings) Validate() error {
17478	invalidParams := request.ErrInvalidParams{Context: "ProresSettings"}
17479	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
17480		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
17481	}
17482	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
17483		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
17484	}
17485	if s.ParDenominator != nil && *s.ParDenominator < 1 {
17486		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
17487	}
17488	if s.ParNumerator != nil && *s.ParNumerator < 1 {
17489		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
17490	}
17491
17492	if invalidParams.Len() > 0 {
17493		return invalidParams
17494	}
17495	return nil
17496}
17497
17498// SetCodecProfile sets the CodecProfile field's value.
17499func (s *ProresSettings) SetCodecProfile(v string) *ProresSettings {
17500	s.CodecProfile = &v
17501	return s
17502}
17503
17504// SetFramerateControl sets the FramerateControl field's value.
17505func (s *ProresSettings) SetFramerateControl(v string) *ProresSettings {
17506	s.FramerateControl = &v
17507	return s
17508}
17509
17510// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
17511func (s *ProresSettings) SetFramerateConversionAlgorithm(v string) *ProresSettings {
17512	s.FramerateConversionAlgorithm = &v
17513	return s
17514}
17515
17516// SetFramerateDenominator sets the FramerateDenominator field's value.
17517func (s *ProresSettings) SetFramerateDenominator(v int64) *ProresSettings {
17518	s.FramerateDenominator = &v
17519	return s
17520}
17521
17522// SetFramerateNumerator sets the FramerateNumerator field's value.
17523func (s *ProresSettings) SetFramerateNumerator(v int64) *ProresSettings {
17524	s.FramerateNumerator = &v
17525	return s
17526}
17527
17528// SetInterlaceMode sets the InterlaceMode field's value.
17529func (s *ProresSettings) SetInterlaceMode(v string) *ProresSettings {
17530	s.InterlaceMode = &v
17531	return s
17532}
17533
17534// SetParControl sets the ParControl field's value.
17535func (s *ProresSettings) SetParControl(v string) *ProresSettings {
17536	s.ParControl = &v
17537	return s
17538}
17539
17540// SetParDenominator sets the ParDenominator field's value.
17541func (s *ProresSettings) SetParDenominator(v int64) *ProresSettings {
17542	s.ParDenominator = &v
17543	return s
17544}
17545
17546// SetParNumerator sets the ParNumerator field's value.
17547func (s *ProresSettings) SetParNumerator(v int64) *ProresSettings {
17548	s.ParNumerator = &v
17549	return s
17550}
17551
17552// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
17553func (s *ProresSettings) SetScanTypeConversionMode(v string) *ProresSettings {
17554	s.ScanTypeConversionMode = &v
17555	return s
17556}
17557
17558// SetSlowPal sets the SlowPal field's value.
17559func (s *ProresSettings) SetSlowPal(v string) *ProresSettings {
17560	s.SlowPal = &v
17561	return s
17562}
17563
17564// SetTelecine sets the Telecine field's value.
17565func (s *ProresSettings) SetTelecine(v string) *ProresSettings {
17566	s.Telecine = &v
17567	return s
17568}
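
// Example: a hedged sketch of a 23.976 fps ProRes output that specifies the
// frame rate as a fraction, as the FramerateControl and FramerateNumerator
// documentation above describes. The codec profile string APPLE_PRORES_422
// is an assumed value from the ProresCodecProfile enum; adjust it for your
// workflow.
//
//    prores := &mediaconvert.ProresSettings{}
//    prores.SetCodecProfile("APPLE_PRORES_422")
//    prores.SetFramerateControl("SPECIFIED")
//    prores.SetFramerateNumerator(24000) // 24000 / 1001 = 23.976 fps
//    prores.SetFramerateDenominator(1001)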
17569
17570// You can use queues to manage the resources that are available to your AWS
17571// account for running multiple transcoding jobs at the same time. If you don't
17572// specify a queue, the service sends all jobs through the default queue. For
17573// more information, see https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html.
17574type Queue struct {
17575	_ struct{} `type:"structure"`
17576
17577	// An identifier for this resource that is unique within all of AWS.
17578	Arn *string `locationName:"arn" type:"string"`
17579
17580	// The timestamp in epoch seconds for when you created the queue.
17581	CreatedAt *time.Time `locationName:"createdAt" type:"timestamp" timestampFormat:"unixTimestamp"`
17582
17583	// An optional description that you create for each queue.
17584	Description *string `locationName:"description" type:"string"`
17585
17586	// The timestamp in epoch seconds for when you most recently updated the queue.
17587	LastUpdated *time.Time `locationName:"lastUpdated" type:"timestamp" timestampFormat:"unixTimestamp"`
17588
17589	// A name that you create for each queue. Each name must be unique within your
17590	// account.
17591	//
17592	// Name is a required field
17593	Name *string `locationName:"name" type:"string" required:"true"`
17594
17595	// Specifies whether the pricing plan for the queue is on-demand or reserved.
17596	// For on-demand, you pay per minute, billed in increments of .01 minute. For
17597	// reserved, you pay for the transcoding capacity of the entire queue, regardless
17598	// of how much or how little you use it. Reserved pricing requires a 12-month
17599	// commitment.
17600	PricingPlan *string `locationName:"pricingPlan" type:"string" enum:"PricingPlan"`
17601
17602	// The estimated number of jobs with a PROGRESSING status.
17603	ProgressingJobsCount *int64 `locationName:"progressingJobsCount" type:"integer"`
17604
17605	// Details about the pricing plan for your reserved queue. Required for reserved
17606	// queues and not applicable to on-demand queues.
17607	ReservationPlan *ReservationPlan `locationName:"reservationPlan" type:"structure"`
17608
17609	// Queues can be ACTIVE or PAUSED. If you pause a queue, the service won't begin
17610	// processing jobs in that queue. Jobs that are running when you pause the queue
17611	// continue to run until they finish or result in an error.
17612	Status *string `locationName:"status" type:"string" enum:"QueueStatus"`
17613
17614	// The estimated number of jobs with a SUBMITTED status.
17615	SubmittedJobsCount *int64 `locationName:"submittedJobsCount" type:"integer"`
17616
17617	// Specifies whether this on-demand queue is system or custom. System queues
17618	// are built in. You can't modify or delete system queues. You can create and
17619	// modify custom queues.
17620	Type *string `locationName:"type" type:"string" enum:"Type"`
17621}
17622
17623// String returns the string representation
17624func (s Queue) String() string {
17625	return awsutil.Prettify(s)
17626}
17627
17628// GoString returns the string representation
17629func (s Queue) GoString() string {
17630	return s.String()
17631}
17632
17633// SetArn sets the Arn field's value.
17634func (s *Queue) SetArn(v string) *Queue {
17635	s.Arn = &v
17636	return s
17637}
17638
17639// SetCreatedAt sets the CreatedAt field's value.
17640func (s *Queue) SetCreatedAt(v time.Time) *Queue {
17641	s.CreatedAt = &v
17642	return s
17643}
17644
17645// SetDescription sets the Description field's value.
17646func (s *Queue) SetDescription(v string) *Queue {
17647	s.Description = &v
17648	return s
17649}
17650
17651// SetLastUpdated sets the LastUpdated field's value.
17652func (s *Queue) SetLastUpdated(v time.Time) *Queue {
17653	s.LastUpdated = &v
17654	return s
17655}
17656
17657// SetName sets the Name field's value.
17658func (s *Queue) SetName(v string) *Queue {
17659	s.Name = &v
17660	return s
17661}
17662
17663// SetPricingPlan sets the PricingPlan field's value.
17664func (s *Queue) SetPricingPlan(v string) *Queue {
17665	s.PricingPlan = &v
17666	return s
17667}
17668
17669// SetProgressingJobsCount sets the ProgressingJobsCount field's value.
17670func (s *Queue) SetProgressingJobsCount(v int64) *Queue {
17671	s.ProgressingJobsCount = &v
17672	return s
17673}
17674
17675// SetReservationPlan sets the ReservationPlan field's value.
17676func (s *Queue) SetReservationPlan(v *ReservationPlan) *Queue {
17677	s.ReservationPlan = v
17678	return s
17679}
17680
17681// SetStatus sets the Status field's value.
17682func (s *Queue) SetStatus(v string) *Queue {
17683	s.Status = &v
17684	return s
17685}
17686
17687// SetSubmittedJobsCount sets the SubmittedJobsCount field's value.
17688func (s *Queue) SetSubmittedJobsCount(v int64) *Queue {
17689	s.SubmittedJobsCount = &v
17690	return s
17691}
17692
17693// SetType sets the Type field's value.
17694func (s *Queue) SetType(v string) *Queue {
17695	s.Type = &v
17696	return s
17697}
17698
// Description of the source and destination queues between which the job has
// moved, along with the timestamp of the move.
17701type QueueTransition struct {
17702	_ struct{} `type:"structure"`
17703
17704	// The queue that the job was on after the transition.
17705	DestinationQueue *string `locationName:"destinationQueue" type:"string"`
17706
17707	// The queue that the job was on before the transition.
17708	SourceQueue *string `locationName:"sourceQueue" type:"string"`
17709
17710	// The time, in Unix epoch format, that the job moved from the source queue
17711	// to the destination queue.
17712	Timestamp *time.Time `locationName:"timestamp" type:"timestamp" timestampFormat:"unixTimestamp"`
17713}
17714
17715// String returns the string representation
17716func (s QueueTransition) String() string {
17717	return awsutil.Prettify(s)
17718}
17719
17720// GoString returns the string representation
17721func (s QueueTransition) GoString() string {
17722	return s.String()
17723}
17724
17725// SetDestinationQueue sets the DestinationQueue field's value.
17726func (s *QueueTransition) SetDestinationQueue(v string) *QueueTransition {
17727	s.DestinationQueue = &v
17728	return s
17729}
17730
17731// SetSourceQueue sets the SourceQueue field's value.
17732func (s *QueueTransition) SetSourceQueue(v string) *QueueTransition {
17733	s.SourceQueue = &v
17734	return s
17735}
17736
17737// SetTimestamp sets the Timestamp field's value.
17738func (s *QueueTransition) SetTimestamp(v time.Time) *QueueTransition {
17739	s.Timestamp = &v
17740	return s
17741}
17742
17743// Use Rectangle to identify a specific area of the video frame.
17744type Rectangle struct {
17745	_ struct{} `type:"structure"`
17746
17747	// Height of rectangle in pixels. Specify only even numbers.
17748	Height *int64 `locationName:"height" min:"2" type:"integer"`
17749
17750	// Width of rectangle in pixels. Specify only even numbers.
17751	Width *int64 `locationName:"width" min:"2" type:"integer"`
17752
17753	// The distance, in pixels, between the rectangle and the left edge of the video
17754	// frame. Specify only even numbers.
17755	X *int64 `locationName:"x" type:"integer"`
17756
17757	// The distance, in pixels, between the rectangle and the top edge of the video
17758	// frame. Specify only even numbers.
17759	Y *int64 `locationName:"y" type:"integer"`
17760}
17761
17762// String returns the string representation
17763func (s Rectangle) String() string {
17764	return awsutil.Prettify(s)
17765}
17766
17767// GoString returns the string representation
17768func (s Rectangle) GoString() string {
17769	return s.String()
17770}
17771
17772// Validate inspects the fields of the type to determine if they are valid.
17773func (s *Rectangle) Validate() error {
17774	invalidParams := request.ErrInvalidParams{Context: "Rectangle"}
17775	if s.Height != nil && *s.Height < 2 {
17776		invalidParams.Add(request.NewErrParamMinValue("Height", 2))
17777	}
17778	if s.Width != nil && *s.Width < 2 {
17779		invalidParams.Add(request.NewErrParamMinValue("Width", 2))
17780	}
17781
17782	if invalidParams.Len() > 0 {
17783		return invalidParams
17784	}
17785	return nil
17786}
17787
17788// SetHeight sets the Height field's value.
17789func (s *Rectangle) SetHeight(v int64) *Rectangle {
17790	s.Height = &v
17791	return s
17792}
17793
17794// SetWidth sets the Width field's value.
17795func (s *Rectangle) SetWidth(v int64) *Rectangle {
17796	s.Width = &v
17797	return s
17798}
17799
17800// SetX sets the X field's value.
17801func (s *Rectangle) SetX(v int64) *Rectangle {
17802	s.X = &v
17803	return s
17804}
17805
17806// SetY sets the Y field's value.
17807func (s *Rectangle) SetY(v int64) *Rectangle {
17808	s.Y = &v
17809	return s
17810}
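
// Example: a minimal sketch of describing a 1280x720 region offset 10 pixels
// from the top-left corner of the frame, for use wherever a Rectangle is
// accepted (for example, input cropping). All four values are even numbers,
// as the field documentation above requires.
//
//    region := &mediaconvert.Rectangle{}
//    region.SetWidth(1280)
//    region.SetHeight(720)
//    region.SetX(10)
//    region.SetY(10)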
17811
17812// Use Manual audio remixing (RemixSettings) to adjust audio levels for each
17813// audio channel in each output of your job. With audio remixing, you can output
17814// more or fewer audio channels than your input audio source provides.
17815type RemixSettings struct {
17816	_ struct{} `type:"structure"`
17817
17818	// Channel mapping (ChannelMapping) contains the group of fields that hold the
17819	// remixing value for each channel, in dB. Specify remix values to indicate
17820	// how much of the content from your input audio channel you want in your output
17821	// audio channels. Each instance of the InputChannels or InputChannelsFineTune
17822	// array specifies these values for one output channel. Use one instance of
17823	// this array for each output channel. In the console, each array corresponds
17824	// to a column in the graphical depiction of the mapping matrix. The rows of
17825	// the graphical matrix correspond to input channels. Valid values are within
17826	// the range from -60 (mute) through 6. A setting of 0 passes the input channel
17827	// unchanged to the output channel (no attenuation or amplification). Use InputChannels
17828	// or InputChannelsFineTune to specify your remix values. Don't use both.
17829	ChannelMapping *ChannelMapping `locationName:"channelMapping" type:"structure"`
17830
17831	// Specify the number of audio channels from your input that you want to use
17832	// in your output. With remixing, you might combine or split the data in these
17833	// channels, so the number of channels in your final output might be different.
17834	// If you are doing both input channel mapping and output channel mapping, the
17835	// number of output channels in your input mapping must be the same as the number
17836	// of input channels in your output mapping.
17837	ChannelsIn *int64 `locationName:"channelsIn" min:"1" type:"integer"`
17838
17839	// Specify the number of channels in this output after remixing. Valid values:
17840	// 1, 2, 4, 6, 8... 64. (1 and even numbers to 64.) If you are doing both input
17841	// channel mapping and output channel mapping, the number of output channels
17842	// in your input mapping must be the same as the number of input channels in
17843	// your output mapping.
17844	ChannelsOut *int64 `locationName:"channelsOut" min:"1" type:"integer"`
17845}
17846
17847// String returns the string representation
17848func (s RemixSettings) String() string {
17849	return awsutil.Prettify(s)
17850}
17851
17852// GoString returns the string representation
17853func (s RemixSettings) GoString() string {
17854	return s.String()
17855}
17856
17857// Validate inspects the fields of the type to determine if they are valid.
17858func (s *RemixSettings) Validate() error {
17859	invalidParams := request.ErrInvalidParams{Context: "RemixSettings"}
17860	if s.ChannelsIn != nil && *s.ChannelsIn < 1 {
17861		invalidParams.Add(request.NewErrParamMinValue("ChannelsIn", 1))
17862	}
17863	if s.ChannelsOut != nil && *s.ChannelsOut < 1 {
17864		invalidParams.Add(request.NewErrParamMinValue("ChannelsOut", 1))
17865	}
17866
17867	if invalidParams.Len() > 0 {
17868		return invalidParams
17869	}
17870	return nil
17871}
17872
17873// SetChannelMapping sets the ChannelMapping field's value.
17874func (s *RemixSettings) SetChannelMapping(v *ChannelMapping) *RemixSettings {
17875	s.ChannelMapping = v
17876	return s
17877}
17878
17879// SetChannelsIn sets the ChannelsIn field's value.
17880func (s *RemixSettings) SetChannelsIn(v int64) *RemixSettings {
17881	s.ChannelsIn = &v
17882	return s
17883}
17884
17885// SetChannelsOut sets the ChannelsOut field's value.
17886func (s *RemixSettings) SetChannelsOut(v int64) *RemixSettings {
17887	s.ChannelsOut = &v
17888	return s
17889}
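
// Example: a hedged sketch of remixing a stereo input down to one output
// channel. The ChannelMapping value is left empty here as a placeholder;
// populate it with one InputChannels (or InputChannelsFineTune) entry per
// output channel, as the ChannelMapping documentation above describes.
//
//    remix := &mediaconvert.RemixSettings{}
//    remix.SetChannelsIn(2)
//    remix.SetChannelsOut(1)
//    remix.SetChannelMapping(&mediaconvert.ChannelMapping{})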
17890
17891// Details about the pricing plan for your reserved queue. Required for reserved
17892// queues and not applicable to on-demand queues.
17893type ReservationPlan struct {
17894	_ struct{} `type:"structure"`
17895
17896	// The length of the term of your reserved queue pricing plan commitment.
17897	Commitment *string `locationName:"commitment" type:"string" enum:"Commitment"`
17898
17899	// The timestamp in epoch seconds for when the current pricing plan term for
17900	// this reserved queue expires.
17901	ExpiresAt *time.Time `locationName:"expiresAt" type:"timestamp" timestampFormat:"unixTimestamp"`
17902
17903	// The timestamp in epoch seconds for when you set up the current pricing plan
17904	// for this reserved queue.
17905	PurchasedAt *time.Time `locationName:"purchasedAt" type:"timestamp" timestampFormat:"unixTimestamp"`
17906
17907	// Specifies whether the term of your reserved queue pricing plan is automatically
17908	// extended (AUTO_RENEW) or expires (EXPIRE) at the end of the term.
17909	RenewalType *string `locationName:"renewalType" type:"string" enum:"RenewalType"`
17910
17911	// Specifies the number of reserved transcode slots (RTS) for this queue. The
17912	// number of RTS determines how many jobs the queue can process in parallel;
17913	// each RTS can process one job at a time. When you increase this number, you
17914	// extend your existing commitment with a new 12-month commitment for a larger
17915	// number of RTS. The new commitment begins when you purchase the additional
17916	// capacity. You can't decrease the number of RTS in your reserved queue.
17917	ReservedSlots *int64 `locationName:"reservedSlots" type:"integer"`
17918
17919	// Specifies whether the pricing plan for your reserved queue is ACTIVE or EXPIRED.
17920	Status *string `locationName:"status" type:"string" enum:"ReservationPlanStatus"`
17921}
17922
17923// String returns the string representation
17924func (s ReservationPlan) String() string {
17925	return awsutil.Prettify(s)
17926}
17927
17928// GoString returns the string representation
17929func (s ReservationPlan) GoString() string {
17930	return s.String()
17931}
17932
17933// SetCommitment sets the Commitment field's value.
17934func (s *ReservationPlan) SetCommitment(v string) *ReservationPlan {
17935	s.Commitment = &v
17936	return s
17937}
17938
17939// SetExpiresAt sets the ExpiresAt field's value.
17940func (s *ReservationPlan) SetExpiresAt(v time.Time) *ReservationPlan {
17941	s.ExpiresAt = &v
17942	return s
17943}
17944
17945// SetPurchasedAt sets the PurchasedAt field's value.
17946func (s *ReservationPlan) SetPurchasedAt(v time.Time) *ReservationPlan {
17947	s.PurchasedAt = &v
17948	return s
17949}
17950
17951// SetRenewalType sets the RenewalType field's value.
17952func (s *ReservationPlan) SetRenewalType(v string) *ReservationPlan {
17953	s.RenewalType = &v
17954	return s
17955}
17956
17957// SetReservedSlots sets the ReservedSlots field's value.
17958func (s *ReservationPlan) SetReservedSlots(v int64) *ReservationPlan {
17959	s.ReservedSlots = &v
17960	return s
17961}
17962
17963// SetStatus sets the Status field's value.
17964func (s *ReservationPlan) SetStatus(v string) *ReservationPlan {
17965	s.Status = &v
17966	return s
17967}
17968
17969// Details about the pricing plan for your reserved queue. Required for reserved
17970// queues and not applicable to on-demand queues.
17971type ReservationPlanSettings struct {
17972	_ struct{} `type:"structure"`
17973
17974	// The length of the term of your reserved queue pricing plan commitment.
17975	//
17976	// Commitment is a required field
17977	Commitment *string `locationName:"commitment" type:"string" required:"true" enum:"Commitment"`
17978
17979	// Specifies whether the term of your reserved queue pricing plan is automatically
17980	// extended (AUTO_RENEW) or expires (EXPIRE) at the end of the term. When your
17981	// term is auto renewed, you extend your commitment by 12 months from the auto
17982	// renew date. You can cancel this commitment.
17983	//
17984	// RenewalType is a required field
17985	RenewalType *string `locationName:"renewalType" type:"string" required:"true" enum:"RenewalType"`
17986
17987	// Specifies the number of reserved transcode slots (RTS) for this queue. The
17988	// number of RTS determines how many jobs the queue can process in parallel;
17989	// each RTS can process one job at a time. You can't decrease the number of
17990	// RTS in your reserved queue. You can increase the number of RTS by extending
17991	// your existing commitment with a new 12-month commitment for the larger number.
17992	// The new commitment begins when you purchase the additional capacity. You
17993	// can't cancel your commitment or revert to your original commitment after
17994	// you increase the capacity.
17995	//
17996	// ReservedSlots is a required field
17997	ReservedSlots *int64 `locationName:"reservedSlots" type:"integer" required:"true"`
17998}
17999
18000// String returns the string representation
18001func (s ReservationPlanSettings) String() string {
18002	return awsutil.Prettify(s)
18003}
18004
18005// GoString returns the string representation
18006func (s ReservationPlanSettings) GoString() string {
18007	return s.String()
18008}
18009
18010// Validate inspects the fields of the type to determine if they are valid.
18011func (s *ReservationPlanSettings) Validate() error {
18012	invalidParams := request.ErrInvalidParams{Context: "ReservationPlanSettings"}
18013	if s.Commitment == nil {
18014		invalidParams.Add(request.NewErrParamRequired("Commitment"))
18015	}
18016	if s.RenewalType == nil {
18017		invalidParams.Add(request.NewErrParamRequired("RenewalType"))
18018	}
18019	if s.ReservedSlots == nil {
18020		invalidParams.Add(request.NewErrParamRequired("ReservedSlots"))
18021	}
18022
18023	if invalidParams.Len() > 0 {
18024		return invalidParams
18025	}
18026	return nil
18027}
18028
18029// SetCommitment sets the Commitment field's value.
18030func (s *ReservationPlanSettings) SetCommitment(v string) *ReservationPlanSettings {
18031	s.Commitment = &v
18032	return s
18033}
18034
18035// SetRenewalType sets the RenewalType field's value.
18036func (s *ReservationPlanSettings) SetRenewalType(v string) *ReservationPlanSettings {
18037	s.RenewalType = &v
18038	return s
18039}
18040
18041// SetReservedSlots sets the ReservedSlots field's value.
18042func (s *ReservationPlanSettings) SetReservedSlots(v int64) *ReservationPlanSettings {
18043	s.ReservedSlots = &v
18044	return s
18045}
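
// Example: a minimal sketch of the three required fields when you convert a
// queue to reserved pricing. ONE_YEAR is an assumed value of the Commitment
// enum; AUTO_RENEW is described in the RenewalType documentation above.
//
//    plan := &mediaconvert.ReservationPlanSettings{}
//    plan.SetCommitment("ONE_YEAR")
//    plan.SetRenewalType("AUTO_RENEW")
//    plan.SetReservedSlots(1)
//
//    if err := plan.Validate(); err != nil {
//        fmt.Println("invalid reservation plan settings:", err)
//    }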
18046
18047// The Amazon Resource Name (ARN) and tags for an AWS Elemental MediaConvert
18048// resource.
18049type ResourceTags struct {
18050	_ struct{} `type:"structure"`
18051
18052	// The Amazon Resource Name (ARN) of the resource.
18053	Arn *string `locationName:"arn" type:"string"`
18054
18055	// The tags for the resource.
18056	Tags map[string]*string `locationName:"tags" type:"map"`
18057}
18058
18059// String returns the string representation
18060func (s ResourceTags) String() string {
18061	return awsutil.Prettify(s)
18062}
18063
18064// GoString returns the string representation
18065func (s ResourceTags) GoString() string {
18066	return s.String()
18067}
18068
18069// SetArn sets the Arn field's value.
18070func (s *ResourceTags) SetArn(v string) *ResourceTags {
18071	s.Arn = &v
18072	return s
18073}
18074
18075// SetTags sets the Tags field's value.
18076func (s *ResourceTags) SetTags(v map[string]*string) *ResourceTags {
18077	s.Tags = v
18078	return s
18079}
18080
18081// Optional. Have MediaConvert automatically apply Amazon S3 access control
18082// for the outputs in this output group. When you don't use this setting, S3
18083// automatically applies the default access control list PRIVATE.
18084type S3DestinationAccessControl struct {
18085	_ struct{} `type:"structure"`
18086
18087	// Choose an Amazon S3 canned ACL for MediaConvert to apply to this output.
18088	CannedAcl *string `locationName:"cannedAcl" type:"string" enum:"S3ObjectCannedAcl"`
18089}
18090
18091// String returns the string representation
18092func (s S3DestinationAccessControl) String() string {
18093	return awsutil.Prettify(s)
18094}
18095
18096// GoString returns the string representation
18097func (s S3DestinationAccessControl) GoString() string {
18098	return s.String()
18099}
18100
18101// SetCannedAcl sets the CannedAcl field's value.
18102func (s *S3DestinationAccessControl) SetCannedAcl(v string) *S3DestinationAccessControl {
18103	s.CannedAcl = &v
18104	return s
18105}
18106
18107// Settings associated with S3 destination
18108type S3DestinationSettings struct {
18109	_ struct{} `type:"structure"`
18110
18111	// Optional. Have MediaConvert automatically apply Amazon S3 access control
18112	// for the outputs in this output group. When you don't use this setting, S3
18113	// automatically applies the default access control list PRIVATE.
18114	AccessControl *S3DestinationAccessControl `locationName:"accessControl" type:"structure"`
18115
18116	// Settings for how your job outputs are encrypted as they are uploaded to Amazon
18117	// S3.
18118	Encryption *S3EncryptionSettings `locationName:"encryption" type:"structure"`
18119}
18120
18121// String returns the string representation
18122func (s S3DestinationSettings) String() string {
18123	return awsutil.Prettify(s)
18124}
18125
18126// GoString returns the string representation
18127func (s S3DestinationSettings) GoString() string {
18128	return s.String()
18129}
18130
18131// SetAccessControl sets the AccessControl field's value.
18132func (s *S3DestinationSettings) SetAccessControl(v *S3DestinationAccessControl) *S3DestinationSettings {
18133	s.AccessControl = v
18134	return s
18135}
18136
18137// SetEncryption sets the Encryption field's value.
18138func (s *S3DestinationSettings) SetEncryption(v *S3EncryptionSettings) *S3DestinationSettings {
18139	s.Encryption = v
18140	return s
18141}
18142
18143// Settings for how your job outputs are encrypted as they are uploaded to Amazon
18144// S3.
18145type S3EncryptionSettings struct {
18146	_ struct{} `type:"structure"`
18147
18148	// Specify how you want your data keys managed. AWS uses data keys to encrypt
18149	// your content. AWS also encrypts the data keys themselves, using a customer
18150	// master key (CMK), and then stores the encrypted data keys alongside your
18151	// encrypted content. Use this setting to specify which AWS service manages
	// the CMK. For the simplest setup, choose Amazon S3 (SERVER_SIDE_ENCRYPTION_S3).
18153	// If you want your master key to be managed by AWS Key Management Service (KMS),
18154	// choose AWS KMS (SERVER_SIDE_ENCRYPTION_KMS). By default, when you choose
18155	// AWS KMS, KMS uses the AWS managed customer master key (CMK) associated with
18156	// Amazon S3 to encrypt your data keys. You can optionally choose to specify
18157	// a different, customer managed CMK. Do so by specifying the Amazon Resource
18158	// Name (ARN) of the key for the setting KMS ARN (kmsKeyArn).
18159	EncryptionType *string `locationName:"encryptionType" type:"string" enum:"S3ServerSideEncryptionType"`
18160
18161	// Optionally, specify the customer master key (CMK) that you want to use to
18162	// encrypt the data key that AWS uses to encrypt your output content. Enter
18163	// the Amazon Resource Name (ARN) of the CMK. To use this setting, you must
18164	// also set Server-side encryption (S3ServerSideEncryptionType) to AWS KMS (SERVER_SIDE_ENCRYPTION_KMS).
18165	// If you set Server-side encryption to AWS KMS but don't specify a CMK here,
18166	// AWS uses the AWS managed CMK associated with Amazon S3.
18167	KmsKeyArn *string `locationName:"kmsKeyArn" type:"string"`
18168}
18169
18170// String returns the string representation
18171func (s S3EncryptionSettings) String() string {
18172	return awsutil.Prettify(s)
18173}
18174
18175// GoString returns the string representation
18176func (s S3EncryptionSettings) GoString() string {
18177	return s.String()
18178}
18179
18180// SetEncryptionType sets the EncryptionType field's value.
18181func (s *S3EncryptionSettings) SetEncryptionType(v string) *S3EncryptionSettings {
18182	s.EncryptionType = &v
18183	return s
18184}
18185
18186// SetKmsKeyArn sets the KmsKeyArn field's value.
18187func (s *S3EncryptionSettings) SetKmsKeyArn(v string) *S3EncryptionSettings {
18188	s.KmsKeyArn = &v
18189	return s
18190}
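
// Example: a hedged sketch of requesting SSE-KMS for job outputs with a
// customer managed CMK. The key ARN is a placeholder; if you omit KmsKeyArn,
// AWS uses the AWS managed CMK associated with Amazon S3, as the
// documentation above describes.
//
//    encryption := &mediaconvert.S3EncryptionSettings{}
//    encryption.SetEncryptionType("SERVER_SIDE_ENCRYPTION_KMS")
//    encryption.SetKmsKeyArn("arn:aws:kms:us-west-2:111122223333:key/EXAMPLE-KEY-ID")
//
//    s3Destination := &mediaconvert.S3DestinationSettings{}
//    s3Destination.SetEncryption(encryption)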
18191
18192// Settings for SCC caption output.
18193type SccDestinationSettings struct {
18194	_ struct{} `type:"structure"`
18195
18196	// Set Framerate (SccDestinationFramerate) to make sure that the captions and
18197	// the video are synchronized in the output. Specify a frame rate that matches
18198	// the frame rate of the associated video. If the video frame rate is 29.97,
18199	// choose 29.97 dropframe (FRAMERATE_29_97_DROPFRAME) only if the video has
18200	// video_insertion=true and drop_frame_timecode=true; otherwise, choose 29.97
18201	// non-dropframe (FRAMERATE_29_97_NON_DROPFRAME).
18202	Framerate *string `locationName:"framerate" type:"string" enum:"SccDestinationFramerate"`
18203}
18204
18205// String returns the string representation
18206func (s SccDestinationSettings) String() string {
18207	return awsutil.Prettify(s)
18208}
18209
18210// GoString returns the string representation
18211func (s SccDestinationSettings) GoString() string {
18212	return s.String()
18213}
18214
18215// SetFramerate sets the Framerate field's value.
18216func (s *SccDestinationSettings) SetFramerate(v string) *SccDestinationSettings {
18217	s.Framerate = &v
18218	return s
18219}
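
// Example: a minimal sketch of matching SCC caption timing to 29.97 fps
// drop-frame video, using the enum value named in the Framerate
// documentation above.
//
//    scc := &mediaconvert.SccDestinationSettings{}
//    scc.SetFramerate("FRAMERATE_29_97_DROPFRAME")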
18220
18221// If your output group type is HLS, DASH, or Microsoft Smooth, use these settings
18222// when doing DRM encryption with a SPEKE-compliant key provider. If your output
18223// group type is CMAF, use the SpekeKeyProviderCmaf settings instead.
18224type SpekeKeyProvider struct {
18225	_ struct{} `type:"structure"`
18226
18227	// If you want your key provider to encrypt the content keys that it provides
18228	// to MediaConvert, set up a certificate with a master key using AWS Certificate
18229	// Manager. Specify the certificate's Amazon Resource Name (ARN) here.
18230	CertificateArn *string `locationName:"certificateArn" type:"string"`
18231
18232	// Specify the resource ID that your SPEKE-compliant key provider uses to identify
18233	// this content.
18234	ResourceId *string `locationName:"resourceId" type:"string"`
18235
	// Relates to SPEKE implementation. DRM system identifiers. DASH output groups
	// support a maximum of two system IDs. Other group types support one system
	// ID. See https://dashif.org/identifiers/content_protection/ for more details.
18239	SystemIds []*string `locationName:"systemIds" type:"list"`
18240
18241	// Specify the URL to the key server that your SPEKE-compliant DRM key provider
18242	// uses to provide keys for encrypting your content.
18243	Url *string `locationName:"url" type:"string"`
18244}
18245
18246// String returns the string representation
18247func (s SpekeKeyProvider) String() string {
18248	return awsutil.Prettify(s)
18249}
18250
18251// GoString returns the string representation
18252func (s SpekeKeyProvider) GoString() string {
18253	return s.String()
18254}
18255
18256// SetCertificateArn sets the CertificateArn field's value.
18257func (s *SpekeKeyProvider) SetCertificateArn(v string) *SpekeKeyProvider {
18258	s.CertificateArn = &v
18259	return s
18260}
18261
18262// SetResourceId sets the ResourceId field's value.
18263func (s *SpekeKeyProvider) SetResourceId(v string) *SpekeKeyProvider {
18264	s.ResourceId = &v
18265	return s
18266}
18267
18268// SetSystemIds sets the SystemIds field's value.
18269func (s *SpekeKeyProvider) SetSystemIds(v []*string) *SpekeKeyProvider {
18270	s.SystemIds = v
18271	return s
18272}
18273
18274// SetUrl sets the Url field's value.
18275func (s *SpekeKeyProvider) SetUrl(v string) *SpekeKeyProvider {
18276	s.Url = &v
18277	return s
18278}
18279
18280// If your output group type is CMAF, use these settings when doing DRM encryption
18281// with a SPEKE-compliant key provider. If your output group type is HLS, DASH,
18282// or Microsoft Smooth, use the SpekeKeyProvider settings instead.
18283type SpekeKeyProviderCmaf struct {
18284	_ struct{} `type:"structure"`
18285
18286	// If you want your key provider to encrypt the content keys that it provides
18287	// to MediaConvert, set up a certificate with a master key using AWS Certificate
18288	// Manager. Specify the certificate's Amazon Resource Name (ARN) here.
18289	CertificateArn *string `locationName:"certificateArn" type:"string"`
18290
18291	// Specify the DRM system IDs that you want signaled in the DASH manifest that
18292	// MediaConvert creates as part of this CMAF package. The DASH manifest can
18293	// currently signal up to three system IDs. For more information, see https://dashif.org/identifiers/content_protection/.
18294	DashSignaledSystemIds []*string `locationName:"dashSignaledSystemIds" type:"list"`
18295
18296	// Specify the DRM system ID that you want signaled in the HLS manifest that
18297	// MediaConvert creates as part of this CMAF package. The HLS manifest can currently
18298	// signal only one system ID. For more information, see https://dashif.org/identifiers/content_protection/.
18299	HlsSignaledSystemIds []*string `locationName:"hlsSignaledSystemIds" type:"list"`
18300
18301	// Specify the resource ID that your SPEKE-compliant key provider uses to identify
18302	// this content.
18303	ResourceId *string `locationName:"resourceId" type:"string"`
18304
18305	// Specify the URL to the key server that your SPEKE-compliant DRM key provider
18306	// uses to provide keys for encrypting your content.
18307	Url *string `locationName:"url" type:"string"`
18308}
18309
18310// String returns the string representation
18311func (s SpekeKeyProviderCmaf) String() string {
18312	return awsutil.Prettify(s)
18313}
18314
18315// GoString returns the string representation
18316func (s SpekeKeyProviderCmaf) GoString() string {
18317	return s.String()
18318}
18319
18320// SetCertificateArn sets the CertificateArn field's value.
18321func (s *SpekeKeyProviderCmaf) SetCertificateArn(v string) *SpekeKeyProviderCmaf {
18322	s.CertificateArn = &v
18323	return s
18324}
18325
18326// SetDashSignaledSystemIds sets the DashSignaledSystemIds field's value.
18327func (s *SpekeKeyProviderCmaf) SetDashSignaledSystemIds(v []*string) *SpekeKeyProviderCmaf {
18328	s.DashSignaledSystemIds = v
18329	return s
18330}
18331
18332// SetHlsSignaledSystemIds sets the HlsSignaledSystemIds field's value.
18333func (s *SpekeKeyProviderCmaf) SetHlsSignaledSystemIds(v []*string) *SpekeKeyProviderCmaf {
18334	s.HlsSignaledSystemIds = v
18335	return s
18336}
18337
18338// SetResourceId sets the ResourceId field's value.
18339func (s *SpekeKeyProviderCmaf) SetResourceId(v string) *SpekeKeyProviderCmaf {
18340	s.ResourceId = &v
18341	return s
18342}
18343
18344// SetUrl sets the Url field's value.
18345func (s *SpekeKeyProviderCmaf) SetUrl(v string) *SpekeKeyProviderCmaf {
18346	s.Url = &v
18347	return s
18348}
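
// Example: a hedged sketch of pointing a CMAF package at a SPEKE-compliant
// key server and signaling one DRM system ID in each manifest. The resource
// ID, key server URL, and system ID are placeholders; use the identifiers
// listed at https://dashif.org/identifiers/content_protection/ for the DRM
// systems you actually use.
//
//    speke := &mediaconvert.SpekeKeyProviderCmaf{}
//    speke.SetResourceId("example-content-id")
//    speke.SetUrl("https://speke.example.com/speke/v1.0/copyProtection")
//    speke.SetDashSignaledSystemIds([]*string{aws.String("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed")})
//    speke.SetHlsSignaledSystemIds([]*string{aws.String("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed")})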
18349
18350// Use these settings to set up encryption with a static key provider.
18351type StaticKeyProvider struct {
18352	_ struct{} `type:"structure"`
18353
18354	// Relates to DRM implementation. Sets the value of the KEYFORMAT attribute.
18355	// Must be 'identity' or a reverse DNS string. May be omitted to indicate an
18356	// implicit value of 'identity'.
18357	KeyFormat *string `locationName:"keyFormat" type:"string"`
18358
18359	// Relates to DRM implementation. Either a single positive integer version value
18360	// or a slash delimited list of version values (1/2/3).
18361	KeyFormatVersions *string `locationName:"keyFormatVersions" type:"string"`
18362
	// Relates to DRM implementation. Use a 32-character hexadecimal string to specify
18364	// Key Value (StaticKeyValue).
18365	StaticKeyValue *string `locationName:"staticKeyValue" type:"string"`
18366
18367	// Relates to DRM implementation. The location of the license server used for
18368	// protecting content.
18369	Url *string `locationName:"url" type:"string"`
18370}
18371
18372// String returns the string representation
18373func (s StaticKeyProvider) String() string {
18374	return awsutil.Prettify(s)
18375}
18376
18377// GoString returns the string representation
18378func (s StaticKeyProvider) GoString() string {
18379	return s.String()
18380}
18381
18382// SetKeyFormat sets the KeyFormat field's value.
18383func (s *StaticKeyProvider) SetKeyFormat(v string) *StaticKeyProvider {
18384	s.KeyFormat = &v
18385	return s
18386}
18387
18388// SetKeyFormatVersions sets the KeyFormatVersions field's value.
18389func (s *StaticKeyProvider) SetKeyFormatVersions(v string) *StaticKeyProvider {
18390	s.KeyFormatVersions = &v
18391	return s
18392}
18393
18394// SetStaticKeyValue sets the StaticKeyValue field's value.
18395func (s *StaticKeyProvider) SetStaticKeyValue(v string) *StaticKeyProvider {
18396	s.StaticKeyValue = &v
18397	return s
18398}
18399
18400// SetUrl sets the Url field's value.
18401func (s *StaticKeyProvider) SetUrl(v string) *StaticKeyProvider {
18402	s.Url = &v
18403	return s
18404}
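
// Example: a hedged sketch of static key encryption using the implicit
// 'identity' key format. The 32-character hexadecimal key value and the
// license server URL are placeholders.
//
//    staticKey := &mediaconvert.StaticKeyProvider{}
//    staticKey.SetKeyFormat("identity")
//    staticKey.SetKeyFormatVersions("1")
//    staticKey.SetStaticKeyValue("0123456789abcdef0123456789abcdef")
//    staticKey.SetUrl("https://license.example.com/keys")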
18405
18406// To add tags to a queue, preset, or job template, send a request with the
18407// Amazon Resource Name (ARN) of the resource and the tags that you want to
18408// add.
18409type TagResourceInput struct {
18410	_ struct{} `type:"structure"`
18411
18412	// The Amazon Resource Name (ARN) of the resource that you want to tag. To get
18413	// the ARN, send a GET request with the resource name.
18414	//
18415	// Arn is a required field
18416	Arn *string `locationName:"arn" type:"string" required:"true"`
18417
18418	// The tags that you want to add to the resource. You can tag resources with
18419	// a key-value pair or with only a key.
18420	//
18421	// Tags is a required field
18422	Tags map[string]*string `locationName:"tags" type:"map" required:"true"`
18423}
18424
18425// String returns the string representation
18426func (s TagResourceInput) String() string {
18427	return awsutil.Prettify(s)
18428}
18429
18430// GoString returns the string representation
18431func (s TagResourceInput) GoString() string {
18432	return s.String()
18433}
18434
18435// Validate inspects the fields of the type to determine if they are valid.
18436func (s *TagResourceInput) Validate() error {
18437	invalidParams := request.ErrInvalidParams{Context: "TagResourceInput"}
18438	if s.Arn == nil {
18439		invalidParams.Add(request.NewErrParamRequired("Arn"))
18440	}
18441	if s.Tags == nil {
18442		invalidParams.Add(request.NewErrParamRequired("Tags"))
18443	}
18444
18445	if invalidParams.Len() > 0 {
18446		return invalidParams
18447	}
18448	return nil
18449}
18450
18451// SetArn sets the Arn field's value.
18452func (s *TagResourceInput) SetArn(v string) *TagResourceInput {
18453	s.Arn = &v
18454	return s
18455}
18456
18457// SetTags sets the Tags field's value.
18458func (s *TagResourceInput) SetTags(v map[string]*string) *TagResourceInput {
18459	s.Tags = v
18460	return s
18461}
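
// Example: a minimal sketch of tagging a queue by ARN with a single key-value
// pair, using the TagResource API operation on the client. The ARN and tag
// values are placeholders.
//
//    input := &mediaconvert.TagResourceInput{}
//    input.SetArn("arn:aws:mediaconvert:us-west-2:111122223333:queues/Default")
//    input.SetTags(map[string]*string{"team": aws.String("video")})
//
//    if _, err := client.TagResource(input); err != nil {
//        fmt.Println("failed to tag resource:", err)
//    }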
18462
18463// A successful request to add tags to a resource returns an OK message.
18464type TagResourceOutput struct {
18465	_ struct{} `type:"structure"`
18466}
18467
18468// String returns the string representation
18469func (s TagResourceOutput) String() string {
18470	return awsutil.Prettify(s)
18471}
18472
18473// GoString returns the string representation
18474func (s TagResourceOutput) GoString() string {
18475	return s.String()
18476}
18477
18478// Settings for Teletext caption output
18479type TeletextDestinationSettings struct {
18480	_ struct{} `type:"structure"`
18481
18482	// Set pageNumber to the Teletext page number for the destination captions for
18483	// this output. This value must be a three-digit hexadecimal string; strings
18484	// ending in -FF are invalid. If you are passing through the entire set of Teletext
18485	// data, do not use this field.
18486	PageNumber *string `locationName:"pageNumber" min:"3" type:"string"`
18487
18488	// Specify the page types for this Teletext page. If you don't specify a value
18489	// here, the service sets the page type to the default value Subtitle (PAGE_TYPE_SUBTITLE).
18490	// If you pass through the entire set of Teletext data, don't use this field.
18491	// When you pass through a set of Teletext pages, your output has the same page
18492	// types as your input.
18493	PageTypes []*string `locationName:"pageTypes" type:"list"`
18494}
18495
18496// String returns the string representation
18497func (s TeletextDestinationSettings) String() string {
18498	return awsutil.Prettify(s)
18499}
18500
18501// GoString returns the string representation
18502func (s TeletextDestinationSettings) GoString() string {
18503	return s.String()
18504}
18505
18506// Validate inspects the fields of the type to determine if they are valid.
18507func (s *TeletextDestinationSettings) Validate() error {
18508	invalidParams := request.ErrInvalidParams{Context: "TeletextDestinationSettings"}
18509	if s.PageNumber != nil && len(*s.PageNumber) < 3 {
18510		invalidParams.Add(request.NewErrParamMinLen("PageNumber", 3))
18511	}
18512
18513	if invalidParams.Len() > 0 {
18514		return invalidParams
18515	}
18516	return nil
18517}
18518
18519// SetPageNumber sets the PageNumber field's value.
18520func (s *TeletextDestinationSettings) SetPageNumber(v string) *TeletextDestinationSettings {
18521	s.PageNumber = &v
18522	return s
18523}
18524
18525// SetPageTypes sets the PageTypes field's value.
18526func (s *TeletextDestinationSettings) SetPageTypes(v []*string) *TeletextDestinationSettings {
18527	s.PageTypes = v
18528	return s
18529}
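
// Example: a minimal sketch of writing Teletext captions to page 888 as a
// subtitle page, using the page type enum value named in the documentation
// above.
//
//    teletext := &mediaconvert.TeletextDestinationSettings{}
//    teletext.SetPageNumber("888")
//    teletext.SetPageTypes([]*string{aws.String("PAGE_TYPE_SUBTITLE")})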
18530
18531// Settings specific to Teletext caption sources, including Page number.
18532type TeletextSourceSettings struct {
18533	_ struct{} `type:"structure"`
18534
18535	// Use Page Number (PageNumber) to specify the three-digit hexadecimal page
18536	// number that will be used for Teletext captions. Do not use this setting if
18537	// you are passing through teletext from the input source to output.
18538	PageNumber *string `locationName:"pageNumber" min:"3" type:"string"`
18539}
18540
18541// String returns the string representation
18542func (s TeletextSourceSettings) String() string {
18543	return awsutil.Prettify(s)
18544}
18545
18546// GoString returns the string representation
18547func (s TeletextSourceSettings) GoString() string {
18548	return s.String()
18549}
18550
18551// Validate inspects the fields of the type to determine if they are valid.
18552func (s *TeletextSourceSettings) Validate() error {
18553	invalidParams := request.ErrInvalidParams{Context: "TeletextSourceSettings"}
18554	if s.PageNumber != nil && len(*s.PageNumber) < 3 {
18555		invalidParams.Add(request.NewErrParamMinLen("PageNumber", 3))
18556	}
18557
18558	if invalidParams.Len() > 0 {
18559		return invalidParams
18560	}
18561	return nil
18562}
18563
18564// SetPageNumber sets the PageNumber field's value.
18565func (s *TeletextSourceSettings) SetPageNumber(v string) *TeletextSourceSettings {
18566	s.PageNumber = &v
18567	return s
18568}
18569
// Timecode burn-in (TimecodeBurnIn) burns the output timecode and specified
// prefix into the output.
18572type TimecodeBurnin struct {
18573	_ struct{} `type:"structure"`
18574
18575	// Use Font Size (FontSize) to set the font size of any burned-in timecode.
18576	// Valid values are 10, 16, 32, 48.
18577	FontSize *int64 `locationName:"fontSize" min:"10" type:"integer"`
18578
	// Use Position (Position) under Timecode burn-in (TimecodeBurnIn) to specify
	// the location of the burned-in timecode on the output video.
18581	Position *string `locationName:"position" type:"string" enum:"TimecodeBurninPosition"`
18582
18583	// Use Prefix (Prefix) to place ASCII characters before any burned-in timecode.
18584	// For example, a prefix of "EZ-" will result in the timecode "EZ-00:00:00:00".
18585	// Provide either the characters themselves or the ASCII code equivalents. The
18586	// supported range of characters is 0x20 through 0x7e. This includes letters,
18587	// numbers, and all special characters represented on a standard English keyboard.
18588	Prefix *string `locationName:"prefix" type:"string"`
18589}
18590
18591// String returns the string representation
18592func (s TimecodeBurnin) String() string {
18593	return awsutil.Prettify(s)
18594}
18595
18596// GoString returns the string representation
18597func (s TimecodeBurnin) GoString() string {
18598	return s.String()
18599}
18600
18601// Validate inspects the fields of the type to determine if they are valid.
18602func (s *TimecodeBurnin) Validate() error {
18603	invalidParams := request.ErrInvalidParams{Context: "TimecodeBurnin"}
18604	if s.FontSize != nil && *s.FontSize < 10 {
18605		invalidParams.Add(request.NewErrParamMinValue("FontSize", 10))
18606	}
18607
18608	if invalidParams.Len() > 0 {
18609		return invalidParams
18610	}
18611	return nil
18612}
18613
18614// SetFontSize sets the FontSize field's value.
18615func (s *TimecodeBurnin) SetFontSize(v int64) *TimecodeBurnin {
18616	s.FontSize = &v
18617	return s
18618}
18619
18620// SetPosition sets the Position field's value.
18621func (s *TimecodeBurnin) SetPosition(v string) *TimecodeBurnin {
18622	s.Position = &v
18623	return s
18624}
18625
18626// SetPrefix sets the Prefix field's value.
18627func (s *TimecodeBurnin) SetPrefix(v string) *TimecodeBurnin {
18628	s.Prefix = &v
18629	return s
18630}
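
// A minimal sketch (not generated from the service model) of building a
// TimecodeBurnin value with the setters above; the position string is an
// assumed TimecodeBurninPosition enum value.
//
//    burnin := &TimecodeBurnin{}
//    burnin.SetFontSize(32)
//    burnin.SetPosition("TOP_CENTER") // assumed enum value
//    burnin.SetPrefix("EZ-")          // yields timecodes such as "EZ-00:00:00:00"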
18631
18632// These settings control how the service handles timecodes throughout the job.
18633// These settings don't affect input clipping.
18634type TimecodeConfig struct {
18635	_ struct{} `type:"structure"`
18636
18637	// If you use an editing platform that relies on an anchor timecode, use Anchor
18638	// Timecode (Anchor) to specify a timecode that will match the input video frame
18639	// to the output video frame. Use 24-hour format with frame number, (HH:MM:SS:FF)
18640	// or (HH:MM:SS;FF). This setting ignores frame rate conversion. System behavior
18641	// for Anchor Timecode varies depending on your setting for Source (TimecodeSource).
18642	// * If Source (TimecodeSource) is set to Specified Start (SPECIFIEDSTART),
18643	// the first input frame is the specified value in Start Timecode (Start). Anchor
	// Timecode (Anchor) and Start Timecode (Start) are used to calculate output timecode.
	// * If Source (TimecodeSource) is set to Start at 0 (ZEROBASED), the first frame
18646	// is 00:00:00:00. * If Source (TimecodeSource) is set to Embedded (EMBEDDED),
18647	// the first frame is the timecode value on the first input frame of the input.
18648	Anchor *string `locationName:"anchor" type:"string"`
18649
18650	// Use Source (TimecodeSource) to set how timecodes are handled within this
18651	// job. To make sure that your video, audio, captions, and markers are synchronized
18652	// and that time-based features, such as image inserter, work correctly, choose
18653	// the Timecode source option that matches your assets. All timecodes are in
18654	// a 24-hour format with frame number (HH:MM:SS:FF). * Embedded (EMBEDDED) -
18655	// Use the timecode that is in the input video. If no embedded timecode is in
18656	// the source, the service will use Start at 0 (ZEROBASED) instead. * Start
18657	// at 0 (ZEROBASED) - Set the timecode of the initial frame to 00:00:00:00.
18658	// * Specified Start (SPECIFIEDSTART) - Set the timecode of the initial frame
18659	// to a value other than zero. You use Start timecode (Start) to provide this
18660	// value.
18661	Source *string `locationName:"source" type:"string" enum:"TimecodeSource"`
18662
18663	// Only use when you set Source (TimecodeSource) to Specified start (SPECIFIEDSTART).
18664	// Use Start timecode (Start) to specify the timecode for the initial frame.
18665	// Use 24-hour format with frame number, (HH:MM:SS:FF) or (HH:MM:SS;FF).
18666	Start *string `locationName:"start" type:"string"`
18667
18668	// Only applies to outputs that support program-date-time stamp. Use Timestamp
18669	// offset (TimestampOffset) to overwrite the timecode date without affecting
18670	// the time and frame number. Provide the new date as a string in the format
	// "yyyy-mm-dd". To use Timestamp offset, you must also enable Insert program-date-time
18672	// (InsertProgramDateTime) in the output settings. For example, if the date
18673	// part of your timecodes is 2002-1-25 and you want to change it to one year
18674	// later, set Timestamp offset (TimestampOffset) to 2003-1-25.
18675	TimestampOffset *string `locationName:"timestampOffset" type:"string"`
18676}
18677
18678// String returns the string representation
18679func (s TimecodeConfig) String() string {
18680	return awsutil.Prettify(s)
18681}
18682
18683// GoString returns the string representation
18684func (s TimecodeConfig) GoString() string {
18685	return s.String()
18686}
18687
18688// SetAnchor sets the Anchor field's value.
18689func (s *TimecodeConfig) SetAnchor(v string) *TimecodeConfig {
18690	s.Anchor = &v
18691	return s
18692}
18693
18694// SetSource sets the Source field's value.
18695func (s *TimecodeConfig) SetSource(v string) *TimecodeConfig {
18696	s.Source = &v
18697	return s
18698}
18699
18700// SetStart sets the Start field's value.
18701func (s *TimecodeConfig) SetStart(v string) *TimecodeConfig {
18702	s.Start = &v
18703	return s
18704}
18705
18706// SetTimestampOffset sets the TimestampOffset field's value.
18707func (s *TimecodeConfig) SetTimestampOffset(v string) *TimecodeConfig {
18708	s.TimestampOffset = &v
18709	return s
18710}
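
// A minimal sketch (not generated from the service model) of a TimecodeConfig
// that starts output timecodes at a specified value, using the setters above.
//
//    tc := &TimecodeConfig{}
//    tc.SetSource("SPECIFIEDSTART")
//    tc.SetStart("01:00:00:00")  // 24-hour format with frame number
//    tc.SetAnchor("01:00:00:00") // only needed if your editing platform relies on an anchor timecode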
18711
18712// Enable Timed metadata insertion (TimedMetadataInsertion) to include ID3 tags
18713// in any HLS outputs. To include timed metadata, you must enable it here, enable
18714// it in each output container, and specify tags and timecodes in ID3 insertion
18715// (Id3Insertion) objects.
18716type TimedMetadataInsertion struct {
18717	_ struct{} `type:"structure"`
18718
18719	// Id3Insertions contains the array of Id3Insertion instances.
18720	Id3Insertions []*Id3Insertion `locationName:"id3Insertions" type:"list"`
18721}
18722
18723// String returns the string representation
18724func (s TimedMetadataInsertion) String() string {
18725	return awsutil.Prettify(s)
18726}
18727
18728// GoString returns the string representation
18729func (s TimedMetadataInsertion) GoString() string {
18730	return s.String()
18731}
18732
18733// SetId3Insertions sets the Id3Insertions field's value.
18734func (s *TimedMetadataInsertion) SetId3Insertions(v []*Id3Insertion) *TimedMetadataInsertion {
18735	s.Id3Insertions = v
18736	return s
18737}
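
// A minimal sketch (not generated from the service model) of enabling timed
// metadata insertion; each Id3Insertion element carries the ID3 tag and timecode
// that you want inserted.
//
//    tmi := &TimedMetadataInsertion{}
//    tmi.SetId3Insertions([]*Id3Insertion{
//        {}, // populate with the ID3 tag and timecode for this insertion
//    })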
18738
18739// Information about when jobs are submitted, started, and finished is specified
18740// in Unix epoch format in seconds.
18741type Timing struct {
18742	_ struct{} `type:"structure"`
18743
	// The time, in Unix epoch format, that the transcoding job finished.
18745	FinishTime *time.Time `locationName:"finishTime" type:"timestamp" timestampFormat:"unixTimestamp"`
18746
18747	// The time, in Unix epoch format, that transcoding for the job began.
18748	StartTime *time.Time `locationName:"startTime" type:"timestamp" timestampFormat:"unixTimestamp"`
18749
18750	// The time, in Unix epoch format, that you submitted the job.
18751	SubmitTime *time.Time `locationName:"submitTime" type:"timestamp" timestampFormat:"unixTimestamp"`
18752}
18753
18754// String returns the string representation
18755func (s Timing) String() string {
18756	return awsutil.Prettify(s)
18757}
18758
18759// GoString returns the string representation
18760func (s Timing) GoString() string {
18761	return s.String()
18762}
18763
18764// SetFinishTime sets the FinishTime field's value.
18765func (s *Timing) SetFinishTime(v time.Time) *Timing {
18766	s.FinishTime = &v
18767	return s
18768}
18769
18770// SetStartTime sets the StartTime field's value.
18771func (s *Timing) SetStartTime(v time.Time) *Timing {
18772	s.StartTime = &v
18773	return s
18774}
18775
18776// SetSubmitTime sets the SubmitTime field's value.
18777func (s *Timing) SetSubmitTime(v time.Time) *Timing {
18778	s.SubmitTime = &v
18779	return s
18780}
18781
18782type TooManyRequestsException struct {
18783	_            struct{}                  `type:"structure"`
18784	RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"`
18785
18786	Message_ *string `locationName:"message" type:"string"`
18787}
18788
18789// String returns the string representation
18790func (s TooManyRequestsException) String() string {
18791	return awsutil.Prettify(s)
18792}
18793
18794// GoString returns the string representation
18795func (s TooManyRequestsException) GoString() string {
18796	return s.String()
18797}
18798
18799func newErrorTooManyRequestsException(v protocol.ResponseMetadata) error {
18800	return &TooManyRequestsException{
18801		RespMetadata: v,
18802	}
18803}
18804
18805// Code returns the exception type name.
18806func (s *TooManyRequestsException) Code() string {
18807	return "TooManyRequestsException"
18808}
18809
18810// Message returns the exception's message.
18811func (s *TooManyRequestsException) Message() string {
18812	if s.Message_ != nil {
18813		return *s.Message_
18814	}
18815	return ""
18816}
18817
18818// OrigErr always returns nil, satisfies awserr.Error interface.
18819func (s *TooManyRequestsException) OrigErr() error {
18820	return nil
18821}
18822
18823func (s *TooManyRequestsException) Error() string {
18824	return fmt.Sprintf("%s: %s", s.Code(), s.Message())
18825}
18826
// StatusCode returns the HTTP status code for the request's response error.
18828func (s *TooManyRequestsException) StatusCode() int {
18829	return s.RespMetadata.StatusCode
18830}
18831
// RequestID returns the service's response RequestID for the request.
18833func (s *TooManyRequestsException) RequestID() string {
18834	return s.RespMetadata.RequestID
18835}
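
// A minimal sketch (not generated from the service model) of inspecting a
// throttling error; err is assumed to be the error returned by a MediaConvert
// API operation in this package.
//
//    if tmr, ok := err.(*TooManyRequestsException); ok {
//        fmt.Println(tmr.Code(), tmr.StatusCode(), tmr.Message())
//        // back off and retry the request
//    }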
18836
18837// Settings specific to caption sources that are specified by track number.
18838// Currently, this is only IMSC captions in an IMF package. If your caption
18839// source is IMSC 1.1 in a separate xml file, use FileSourceSettings instead
18840// of TrackSourceSettings.
18841type TrackSourceSettings struct {
18842	_ struct{} `type:"structure"`
18843
18844	// Use this setting to select a single captions track from a source. Track numbers
18845	// correspond to the order in the captions source file. For IMF sources, track
18846	// numbering is based on the order that the captions appear in the CPL. For
18847	// example, use 1 to select the captions asset that is listed first in the CPL.
18848	// To include more than one captions track in your job outputs, create multiple
18849	// input captions selectors. Specify one track per selector.
18850	TrackNumber *int64 `locationName:"trackNumber" min:"1" type:"integer"`
18851}
18852
18853// String returns the string representation
18854func (s TrackSourceSettings) String() string {
18855	return awsutil.Prettify(s)
18856}
18857
18858// GoString returns the string representation
18859func (s TrackSourceSettings) GoString() string {
18860	return s.String()
18861}
18862
18863// Validate inspects the fields of the type to determine if they are valid.
18864func (s *TrackSourceSettings) Validate() error {
18865	invalidParams := request.ErrInvalidParams{Context: "TrackSourceSettings"}
18866	if s.TrackNumber != nil && *s.TrackNumber < 1 {
18867		invalidParams.Add(request.NewErrParamMinValue("TrackNumber", 1))
18868	}
18869
18870	if invalidParams.Len() > 0 {
18871		return invalidParams
18872	}
18873	return nil
18874}
18875
18876// SetTrackNumber sets the TrackNumber field's value.
18877func (s *TrackSourceSettings) SetTrackNumber(v int64) *TrackSourceSettings {
18878	s.TrackNumber = &v
18879	return s
18880}
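
// A minimal sketch (not generated from the service model) of selecting the first
// captions track from an IMF source, using the setter above.
//
//    track := &TrackSourceSettings{}
//    track.SetTrackNumber(1) // the captions asset listed first in the CPL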
18881
18882// Settings specific to TTML caption outputs, including Pass style information
18883// (TtmlStylePassthrough).
18884type TtmlDestinationSettings struct {
18885	_ struct{} `type:"structure"`
18886
18887	// Pass through style and position information from a TTML-like input source
18888	// (TTML, SMPTE-TT) to the TTML output.
18889	StylePassthrough *string `locationName:"stylePassthrough" type:"string" enum:"TtmlStylePassthrough"`
18890}
18891
18892// String returns the string representation
18893func (s TtmlDestinationSettings) String() string {
18894	return awsutil.Prettify(s)
18895}
18896
18897// GoString returns the string representation
18898func (s TtmlDestinationSettings) GoString() string {
18899	return s.String()
18900}
18901
18902// SetStylePassthrough sets the StylePassthrough field's value.
18903func (s *TtmlDestinationSettings) SetStylePassthrough(v string) *TtmlDestinationSettings {
18904	s.StylePassthrough = &v
18905	return s
18906}
18907
18908// To remove tags from a resource, send a request with the Amazon Resource Name
18909// (ARN) of the resource and the keys of the tags that you want to remove.
18910type UntagResourceInput struct {
18911	_ struct{} `type:"structure"`
18912
18913	// The Amazon Resource Name (ARN) of the resource that you want to remove tags
18914	// from. To get the ARN, send a GET request with the resource name.
18915	//
18916	// Arn is a required field
18917	Arn *string `location:"uri" locationName:"arn" type:"string" required:"true"`
18918
18919	// The keys of the tags that you want to remove from the resource.
18920	TagKeys []*string `locationName:"tagKeys" type:"list"`
18921}
18922
18923// String returns the string representation
18924func (s UntagResourceInput) String() string {
18925	return awsutil.Prettify(s)
18926}
18927
18928// GoString returns the string representation
18929func (s UntagResourceInput) GoString() string {
18930	return s.String()
18931}
18932
18933// Validate inspects the fields of the type to determine if they are valid.
18934func (s *UntagResourceInput) Validate() error {
18935	invalidParams := request.ErrInvalidParams{Context: "UntagResourceInput"}
18936	if s.Arn == nil {
18937		invalidParams.Add(request.NewErrParamRequired("Arn"))
18938	}
18939	if s.Arn != nil && len(*s.Arn) < 1 {
18940		invalidParams.Add(request.NewErrParamMinLen("Arn", 1))
18941	}
18942
18943	if invalidParams.Len() > 0 {
18944		return invalidParams
18945	}
18946	return nil
18947}
18948
18949// SetArn sets the Arn field's value.
18950func (s *UntagResourceInput) SetArn(v string) *UntagResourceInput {
18951	s.Arn = &v
18952	return s
18953}
18954
18955// SetTagKeys sets the TagKeys field's value.
18956func (s *UntagResourceInput) SetTagKeys(v []*string) *UntagResourceInput {
18957	s.TagKeys = v
18958	return s
18959}
18960
18961// A successful request to remove tags from a resource returns an OK message.
18962type UntagResourceOutput struct {
18963	_ struct{} `type:"structure"`
18964}
18965
18966// String returns the string representation
18967func (s UntagResourceOutput) String() string {
18968	return awsutil.Prettify(s)
18969}
18970
18971// GoString returns the string representation
18972func (s UntagResourceOutput) GoString() string {
18973	return s.String()
18974}
18975
18976// Modify a job template by sending a request with the job template name and
18977// any of the following that you wish to change: description, category, and
18978// queue.
18979type UpdateJobTemplateInput struct {
18980	_ struct{} `type:"structure"`
18981
18982	// Accelerated transcoding can significantly speed up jobs with long, visually
18983	// complex content. Outputs that use this feature incur pro-tier pricing. For
18984	// information about feature limitations, see the AWS Elemental MediaConvert
18985	// User Guide.
18986	AccelerationSettings *AccelerationSettings `locationName:"accelerationSettings" type:"structure"`
18987
18988	// The new category for the job template, if you are changing it.
18989	Category *string `locationName:"category" type:"string"`
18990
18991	// The new description for the job template, if you are changing it.
18992	Description *string `locationName:"description" type:"string"`
18993
18994	// Optional list of hop destinations.
18995	HopDestinations []*HopDestination `locationName:"hopDestinations" type:"list"`
18996
	// The name of the job template you are modifying.
18998	//
18999	// Name is a required field
19000	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
19001
19002	// Specify the relative priority for this job. In any given queue, the service
19003	// begins processing the job with the highest value first. When more than one
19004	// job has the same priority, the service begins processing the job that you
19005	// submitted first. If you don't specify a priority, the service uses the default
19006	// value 0.
19007	Priority *int64 `locationName:"priority" type:"integer"`
19008
19009	// The new queue for the job template, if you are changing it.
19010	Queue *string `locationName:"queue" type:"string"`
19011
19012	// JobTemplateSettings contains all the transcode settings saved in the template
19013	// that will be applied to jobs created from it.
19014	Settings *JobTemplateSettings `locationName:"settings" type:"structure"`
19015
19016	// Specify how often MediaConvert sends STATUS_UPDATE events to Amazon CloudWatch
19017	// Events. Set the interval, in seconds, between status updates. MediaConvert
19018	// sends an update at this interval from the time the service begins processing
19019	// your job to the time it completes the transcode or encounters an error.
19020	StatusUpdateInterval *string `locationName:"statusUpdateInterval" type:"string" enum:"StatusUpdateInterval"`
19021}
19022
19023// String returns the string representation
19024func (s UpdateJobTemplateInput) String() string {
19025	return awsutil.Prettify(s)
19026}
19027
19028// GoString returns the string representation
19029func (s UpdateJobTemplateInput) GoString() string {
19030	return s.String()
19031}
19032
19033// Validate inspects the fields of the type to determine if they are valid.
19034func (s *UpdateJobTemplateInput) Validate() error {
19035	invalidParams := request.ErrInvalidParams{Context: "UpdateJobTemplateInput"}
19036	if s.Name == nil {
19037		invalidParams.Add(request.NewErrParamRequired("Name"))
19038	}
19039	if s.Name != nil && len(*s.Name) < 1 {
19040		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
19041	}
19042	if s.Priority != nil && *s.Priority < -50 {
19043		invalidParams.Add(request.NewErrParamMinValue("Priority", -50))
19044	}
19045	if s.AccelerationSettings != nil {
19046		if err := s.AccelerationSettings.Validate(); err != nil {
19047			invalidParams.AddNested("AccelerationSettings", err.(request.ErrInvalidParams))
19048		}
19049	}
19050	if s.HopDestinations != nil {
19051		for i, v := range s.HopDestinations {
19052			if v == nil {
19053				continue
19054			}
19055			if err := v.Validate(); err != nil {
19056				invalidParams.AddNested(fmt.Sprintf("%s[%v]", "HopDestinations", i), err.(request.ErrInvalidParams))
19057			}
19058		}
19059	}
19060	if s.Settings != nil {
19061		if err := s.Settings.Validate(); err != nil {
19062			invalidParams.AddNested("Settings", err.(request.ErrInvalidParams))
19063		}
19064	}
19065
19066	if invalidParams.Len() > 0 {
19067		return invalidParams
19068	}
19069	return nil
19070}
19071
19072// SetAccelerationSettings sets the AccelerationSettings field's value.
19073func (s *UpdateJobTemplateInput) SetAccelerationSettings(v *AccelerationSettings) *UpdateJobTemplateInput {
19074	s.AccelerationSettings = v
19075	return s
19076}
19077
19078// SetCategory sets the Category field's value.
19079func (s *UpdateJobTemplateInput) SetCategory(v string) *UpdateJobTemplateInput {
19080	s.Category = &v
19081	return s
19082}
19083
19084// SetDescription sets the Description field's value.
19085func (s *UpdateJobTemplateInput) SetDescription(v string) *UpdateJobTemplateInput {
19086	s.Description = &v
19087	return s
19088}
19089
19090// SetHopDestinations sets the HopDestinations field's value.
19091func (s *UpdateJobTemplateInput) SetHopDestinations(v []*HopDestination) *UpdateJobTemplateInput {
19092	s.HopDestinations = v
19093	return s
19094}
19095
19096// SetName sets the Name field's value.
19097func (s *UpdateJobTemplateInput) SetName(v string) *UpdateJobTemplateInput {
19098	s.Name = &v
19099	return s
19100}
19101
19102// SetPriority sets the Priority field's value.
19103func (s *UpdateJobTemplateInput) SetPriority(v int64) *UpdateJobTemplateInput {
19104	s.Priority = &v
19105	return s
19106}
19107
19108// SetQueue sets the Queue field's value.
19109func (s *UpdateJobTemplateInput) SetQueue(v string) *UpdateJobTemplateInput {
19110	s.Queue = &v
19111	return s
19112}
19113
19114// SetSettings sets the Settings field's value.
19115func (s *UpdateJobTemplateInput) SetSettings(v *JobTemplateSettings) *UpdateJobTemplateInput {
19116	s.Settings = v
19117	return s
19118}
19119
19120// SetStatusUpdateInterval sets the StatusUpdateInterval field's value.
19121func (s *UpdateJobTemplateInput) SetStatusUpdateInterval(v string) *UpdateJobTemplateInput {
19122	s.StatusUpdateInterval = &v
19123	return s
19124}
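
// A minimal sketch (not generated from the service model) of building an update
// request for an existing job template; the template name, category, and queue
// shown are hypothetical, and client is an assumed *MediaConvert service client.
//
//    input := &UpdateJobTemplateInput{}
//    input.SetName("my-template")
//    input.SetCategory("broadcast")
//    input.SetPriority(10) // within a queue, higher values are processed first
//    input.SetQueue("Default")
//    // resp, err := client.UpdateJobTemplate(input)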
19125
19126// Successful update job template requests will return the new job template
19127// JSON.
19128type UpdateJobTemplateOutput struct {
19129	_ struct{} `type:"structure"`
19130
19131	// A job template is a pre-made set of encoding instructions that you can use
19132	// to quickly create a job.
19133	JobTemplate *JobTemplate `locationName:"jobTemplate" type:"structure"`
19134}
19135
19136// String returns the string representation
19137func (s UpdateJobTemplateOutput) String() string {
19138	return awsutil.Prettify(s)
19139}
19140
19141// GoString returns the string representation
19142func (s UpdateJobTemplateOutput) GoString() string {
19143	return s.String()
19144}
19145
19146// SetJobTemplate sets the JobTemplate field's value.
19147func (s *UpdateJobTemplateOutput) SetJobTemplate(v *JobTemplate) *UpdateJobTemplateOutput {
19148	s.JobTemplate = v
19149	return s
19150}
19151
19152// Modify a preset by sending a request with the preset name and any of the
19153// following that you wish to change: description, category, and transcoding
19154// settings.
19155type UpdatePresetInput struct {
19156	_ struct{} `type:"structure"`
19157
19158	// The new category for the preset, if you are changing it.
19159	Category *string `locationName:"category" type:"string"`
19160
19161	// The new description for the preset, if you are changing it.
19162	Description *string `locationName:"description" type:"string"`
19163
19164	// The name of the preset you are modifying.
19165	//
19166	// Name is a required field
19167	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
19168
19169	// Settings for preset
19170	Settings *PresetSettings `locationName:"settings" type:"structure"`
19171}
19172
19173// String returns the string representation
19174func (s UpdatePresetInput) String() string {
19175	return awsutil.Prettify(s)
19176}
19177
19178// GoString returns the string representation
19179func (s UpdatePresetInput) GoString() string {
19180	return s.String()
19181}
19182
19183// Validate inspects the fields of the type to determine if they are valid.
19184func (s *UpdatePresetInput) Validate() error {
19185	invalidParams := request.ErrInvalidParams{Context: "UpdatePresetInput"}
19186	if s.Name == nil {
19187		invalidParams.Add(request.NewErrParamRequired("Name"))
19188	}
19189	if s.Name != nil && len(*s.Name) < 1 {
19190		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
19191	}
19192	if s.Settings != nil {
19193		if err := s.Settings.Validate(); err != nil {
19194			invalidParams.AddNested("Settings", err.(request.ErrInvalidParams))
19195		}
19196	}
19197
19198	if invalidParams.Len() > 0 {
19199		return invalidParams
19200	}
19201	return nil
19202}
19203
19204// SetCategory sets the Category field's value.
19205func (s *UpdatePresetInput) SetCategory(v string) *UpdatePresetInput {
19206	s.Category = &v
19207	return s
19208}
19209
19210// SetDescription sets the Description field's value.
19211func (s *UpdatePresetInput) SetDescription(v string) *UpdatePresetInput {
19212	s.Description = &v
19213	return s
19214}
19215
19216// SetName sets the Name field's value.
19217func (s *UpdatePresetInput) SetName(v string) *UpdatePresetInput {
19218	s.Name = &v
19219	return s
19220}
19221
19222// SetSettings sets the Settings field's value.
19223func (s *UpdatePresetInput) SetSettings(v *PresetSettings) *UpdatePresetInput {
19224	s.Settings = v
19225	return s
19226}
19227
19228// Successful update preset requests will return the new preset JSON.
19229type UpdatePresetOutput struct {
19230	_ struct{} `type:"structure"`
19231
19232	// A preset is a collection of preconfigured media conversion settings that
19233	// you want MediaConvert to apply to the output during the conversion process.
19234	Preset *Preset `locationName:"preset" type:"structure"`
19235}
19236
19237// String returns the string representation
19238func (s UpdatePresetOutput) String() string {
19239	return awsutil.Prettify(s)
19240}
19241
19242// GoString returns the string representation
19243func (s UpdatePresetOutput) GoString() string {
19244	return s.String()
19245}
19246
19247// SetPreset sets the Preset field's value.
19248func (s *UpdatePresetOutput) SetPreset(v *Preset) *UpdatePresetOutput {
19249	s.Preset = v
19250	return s
19251}
19252
19253// Modify a queue by sending a request with the queue name and any changes to
19254// the queue.
19255type UpdateQueueInput struct {
19256	_ struct{} `type:"structure"`
19257
19258	// The new description for the queue, if you are changing it.
19259	Description *string `locationName:"description" type:"string"`
19260
19261	// The name of the queue that you are modifying.
19262	//
19263	// Name is a required field
19264	Name *string `location:"uri" locationName:"name" type:"string" required:"true"`
19265
19266	// The new details of your pricing plan for your reserved queue. When you set
19267	// up a new pricing plan to replace an expired one, you enter into another 12-month
	// commitment. When you add capacity to your queue by increasing the number
	// of reserved transcode slots (RTS), you extend the term of your commitment
	// to 12 months from when you
19270	// add capacity. After you make these commitments, you can't cancel them.
19271	ReservationPlanSettings *ReservationPlanSettings `locationName:"reservationPlanSettings" type:"structure"`
19272
19273	// Pause or activate a queue by changing its status between ACTIVE and PAUSED.
19274	// If you pause a queue, jobs in that queue won't begin. Jobs that are running
19275	// when you pause the queue continue to run until they finish or result in an
19276	// error.
19277	Status *string `locationName:"status" type:"string" enum:"QueueStatus"`
19278}
19279
19280// String returns the string representation
19281func (s UpdateQueueInput) String() string {
19282	return awsutil.Prettify(s)
19283}
19284
19285// GoString returns the string representation
19286func (s UpdateQueueInput) GoString() string {
19287	return s.String()
19288}
19289
19290// Validate inspects the fields of the type to determine if they are valid.
19291func (s *UpdateQueueInput) Validate() error {
19292	invalidParams := request.ErrInvalidParams{Context: "UpdateQueueInput"}
19293	if s.Name == nil {
19294		invalidParams.Add(request.NewErrParamRequired("Name"))
19295	}
19296	if s.Name != nil && len(*s.Name) < 1 {
19297		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
19298	}
19299	if s.ReservationPlanSettings != nil {
19300		if err := s.ReservationPlanSettings.Validate(); err != nil {
19301			invalidParams.AddNested("ReservationPlanSettings", err.(request.ErrInvalidParams))
19302		}
19303	}
19304
19305	if invalidParams.Len() > 0 {
19306		return invalidParams
19307	}
19308	return nil
19309}
19310
19311// SetDescription sets the Description field's value.
19312func (s *UpdateQueueInput) SetDescription(v string) *UpdateQueueInput {
19313	s.Description = &v
19314	return s
19315}
19316
19317// SetName sets the Name field's value.
19318func (s *UpdateQueueInput) SetName(v string) *UpdateQueueInput {
19319	s.Name = &v
19320	return s
19321}
19322
19323// SetReservationPlanSettings sets the ReservationPlanSettings field's value.
19324func (s *UpdateQueueInput) SetReservationPlanSettings(v *ReservationPlanSettings) *UpdateQueueInput {
19325	s.ReservationPlanSettings = v
19326	return s
19327}
19328
19329// SetStatus sets the Status field's value.
19330func (s *UpdateQueueInput) SetStatus(v string) *UpdateQueueInput {
19331	s.Status = &v
19332	return s
19333}
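
// A minimal sketch (not generated from the service model) of pausing a queue;
// the queue name is hypothetical and client is an assumed *MediaConvert service
// client.
//
//    input := &UpdateQueueInput{}
//    input.SetName("Default")
//    input.SetStatus("PAUSED") // jobs that are already running continue until they finish
//    // resp, err := client.UpdateQueue(input)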
19334
19335// Successful update queue requests return the new queue information in JSON
19336// format.
19337type UpdateQueueOutput struct {
19338	_ struct{} `type:"structure"`
19339
19340	// You can use queues to manage the resources that are available to your AWS
19341	// account for running multiple transcoding jobs at the same time. If you don't
19342	// specify a queue, the service sends all jobs through the default queue. For
19343	// more information, see https://docs.aws.amazon.com/mediaconvert/latest/ug/working-with-queues.html.
19344	Queue *Queue `locationName:"queue" type:"structure"`
19345}
19346
19347// String returns the string representation
19348func (s UpdateQueueOutput) String() string {
19349	return awsutil.Prettify(s)
19350}
19351
19352// GoString returns the string representation
19353func (s UpdateQueueOutput) GoString() string {
19354	return s.String()
19355}
19356
19357// SetQueue sets the Queue field's value.
19358func (s *UpdateQueueOutput) SetQueue(v *Queue) *UpdateQueueOutput {
19359	s.Queue = v
19360	return s
19361}
19362
19363// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
// the value VC3.
19365type Vc3Settings struct {
19366	_ struct{} `type:"structure"`
19367
19368	// If you are using the console, use the Framerate setting to specify the frame
19369	// rate for this output. If you want to keep the same frame rate as the input
19370	// video, choose Follow source. If you want to do frame rate conversion, choose
19371	// a frame rate from the dropdown list or choose Custom. The framerates shown
19372	// in the dropdown list are decimal approximations of fractions. If you choose
19373	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
19374	// job specification as a JSON file without the console, use FramerateControl
19375	// to specify which value the service uses for the frame rate for this output.
19376	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
19377	// from the input. Choose SPECIFIED if you want the service to use the frame
19378	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
19379	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"Vc3FramerateControl"`
19380
19381	// Choose the method that you want MediaConvert to use when increasing or decreasing
19382	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
19383	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
19384	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
19385	// smooth picture, but might introduce undesirable video artifacts. For complex
19386	// frame rate conversions, especially if your source video has already been
19387	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
19388	// motion-compensated interpolation. FrameFormer chooses the best conversion
19389	// method frame by frame. Note that using FrameFormer increases the transcoding
19390	// time and incurs a significant add-on cost.
19391	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"Vc3FramerateConversionAlgorithm"`
19392
19393	// When you use the API for transcode jobs that use frame rate conversion, specify
19394	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
19395	// FramerateDenominator to specify the denominator of this fraction. In this
19396	// example, use 1001 for the value of FramerateDenominator. When you use the
19397	// console for transcode jobs that use frame rate conversion, provide the value
19398	// as a decimal number for Framerate. In this example, specify 23.976.
19399	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
19400
19401	// When you use the API for transcode jobs that use frame rate conversion, specify
19402	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
19403	// FramerateNumerator to specify the numerator of this fraction. In this example,
19404	// use 24000 for the value of FramerateNumerator. When you use the console for
19405	// transcode jobs that use frame rate conversion, provide the value as a decimal
19406	// number for Framerate. In this example, specify 23.976.
19407	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"24" type:"integer"`
19408
19409	// Optional. Choose the scan line type for this output. If you don't specify
19410	// a value, MediaConvert will create a progressive output.
19411	InterlaceMode *string `locationName:"interlaceMode" type:"string" enum:"Vc3InterlaceMode"`
19412
19413	// Use this setting for interlaced outputs, when your output frame rate is half
19414	// of your input frame rate. In this situation, choose Optimized interlacing
19415	// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
19416	// case, each progressive frame from the input corresponds to an interlaced
19417	// field in the output. Keep the default value, Basic interlacing (INTERLACED),
19418	// for all other output frame rates. With basic interlacing, MediaConvert performs
19419	// any frame rate conversion first and then interlaces the frames. When you
19420	// choose Optimized interlacing and you set your output frame rate to a value
19421	// that isn't suitable for optimized interlacing, MediaConvert automatically
19422	// falls back to basic interlacing. Required settings: To use optimized interlacing,
19423	// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
19424	// use optimized interlacing for hard telecine outputs. You must also set Interlace
19425	// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
19426	ScanTypeConversionMode *string `locationName:"scanTypeConversionMode" type:"string" enum:"Vc3ScanTypeConversionMode"`
19427
19428	// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
19429	// second (fps). Enable slow PAL to create a 25 fps output by relabeling the
19430	// video frames and resampling your audio. Note that enabling this setting will
19431	// slightly reduce the duration of your video. Related settings: You must also
19432	// set Framerate to 25. In your JSON job specification, set (framerateControl)
19433	// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
19434	// 1.
19435	SlowPal *string `locationName:"slowPal" type:"string" enum:"Vc3SlowPal"`
19436
19437	// When you do frame rate conversion from 23.976 frames per second (fps) to
19438	// 29.97 fps, and your output scan type is interlaced, you can optionally enable
19439	// hard telecine (HARD) to create a smoother picture. When you keep the default
19440	// value, None (NONE), MediaConvert does a standard frame rate conversion to
19441	// 29.97 without doing anything with the field polarity to create a smoother
19442	// picture.
19443	Telecine *string `locationName:"telecine" type:"string" enum:"Vc3Telecine"`
19444
19445	// Specify the VC3 class to choose the quality characteristics for this output.
19446	// VC3 class, together with the settings Framerate (framerateNumerator and framerateDenominator)
19447	// and Resolution (height and width), determine your output bitrate. For example,
19448	// say that your video resolution is 1920x1080 and your framerate is 29.97.
19449	// Then Class 145 (CLASS_145) gives you an output with a bitrate of approximately
	// 145 Mbps and Class 220 (CLASS_220) gives you an output with a bitrate of
19451	// approximately 220 Mbps. VC3 class also specifies the color bit depth of your
19452	// output.
19453	Vc3Class *string `locationName:"vc3Class" type:"string" enum:"Vc3Class"`
19454}
19455
19456// String returns the string representation
19457func (s Vc3Settings) String() string {
19458	return awsutil.Prettify(s)
19459}
19460
19461// GoString returns the string representation
19462func (s Vc3Settings) GoString() string {
19463	return s.String()
19464}
19465
19466// Validate inspects the fields of the type to determine if they are valid.
19467func (s *Vc3Settings) Validate() error {
19468	invalidParams := request.ErrInvalidParams{Context: "Vc3Settings"}
19469	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
19470		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
19471	}
19472	if s.FramerateNumerator != nil && *s.FramerateNumerator < 24 {
19473		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 24))
19474	}
19475
19476	if invalidParams.Len() > 0 {
19477		return invalidParams
19478	}
19479	return nil
19480}
19481
19482// SetFramerateControl sets the FramerateControl field's value.
19483func (s *Vc3Settings) SetFramerateControl(v string) *Vc3Settings {
19484	s.FramerateControl = &v
19485	return s
19486}
19487
19488// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
19489func (s *Vc3Settings) SetFramerateConversionAlgorithm(v string) *Vc3Settings {
19490	s.FramerateConversionAlgorithm = &v
19491	return s
19492}
19493
19494// SetFramerateDenominator sets the FramerateDenominator field's value.
19495func (s *Vc3Settings) SetFramerateDenominator(v int64) *Vc3Settings {
19496	s.FramerateDenominator = &v
19497	return s
19498}
19499
19500// SetFramerateNumerator sets the FramerateNumerator field's value.
19501func (s *Vc3Settings) SetFramerateNumerator(v int64) *Vc3Settings {
19502	s.FramerateNumerator = &v
19503	return s
19504}
19505
19506// SetInterlaceMode sets the InterlaceMode field's value.
19507func (s *Vc3Settings) SetInterlaceMode(v string) *Vc3Settings {
19508	s.InterlaceMode = &v
19509	return s
19510}
19511
19512// SetScanTypeConversionMode sets the ScanTypeConversionMode field's value.
19513func (s *Vc3Settings) SetScanTypeConversionMode(v string) *Vc3Settings {
19514	s.ScanTypeConversionMode = &v
19515	return s
19516}
19517
19518// SetSlowPal sets the SlowPal field's value.
19519func (s *Vc3Settings) SetSlowPal(v string) *Vc3Settings {
19520	s.SlowPal = &v
19521	return s
19522}
19523
19524// SetTelecine sets the Telecine field's value.
19525func (s *Vc3Settings) SetTelecine(v string) *Vc3Settings {
19526	s.Telecine = &v
19527	return s
19528}
19529
19530// SetVc3Class sets the Vc3Class field's value.
19531func (s *Vc3Settings) SetVc3Class(v string) *Vc3Settings {
19532	s.Vc3Class = &v
19533	return s
19534}
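
// A minimal sketch (not generated from the service model) of specifying a
// 23.976 fps output as the fraction 24000/1001, together with a VC3 class,
// using the setters above.
//
//    vc3 := &Vc3Settings{}
//    vc3.SetFramerateControl("SPECIFIED")
//    vc3.SetFramerateNumerator(24000)
//    vc3.SetFramerateDenominator(1001) // 24000 / 1001 = 23.976 fps
//    vc3.SetVc3Class("CLASS_145")      // quality class; with resolution and frame rate, this determines bitrate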
19535
19536// Video codec settings, (CodecSettings) under (VideoDescription), contains
19537// the group of settings related to video encoding. The settings in this group
19538// vary depending on the value that you choose for Video codec (Codec). For
19539// each codec enum that you choose, define the corresponding settings object.
19540// The following lists the codec enum, settings object pairs. * AV1, Av1Settings
19541// * AVC_INTRA, AvcIntraSettings * FRAME_CAPTURE, FrameCaptureSettings * H_264,
19542// H264Settings * H_265, H265Settings * MPEG2, Mpeg2Settings * PRORES, ProresSettings
19543// * VC3, Vc3Settings * VP8, Vp8Settings * VP9, Vp9Settings
19544type VideoCodecSettings struct {
19545	_ struct{} `type:"structure"`
19546
19547	// Required when you set Codec, under VideoDescription>CodecSettings to the
19548	// value AV1.
19549	Av1Settings *Av1Settings `locationName:"av1Settings" type:"structure"`
19550
19551	// Required when you set your output video codec to AVC-Intra. For more information
19552	// about the AVC-I settings, see the relevant specification. For detailed information
19553	// about SD and HD in AVC-I, see https://ieeexplore.ieee.org/document/7290936.
19554	// For information about 4K/2K in AVC-I, see https://pro-av.panasonic.net/en/avc-ultra/AVC-ULTRAoverview.pdf.
19555	AvcIntraSettings *AvcIntraSettings `locationName:"avcIntraSettings" type:"structure"`
19556
19557	// Specifies the video codec. This must be equal to one of the enum values defined
19558	// by the object VideoCodec.
19559	Codec *string `locationName:"codec" type:"string" enum:"VideoCodec"`
19560
19561	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19562	// the value FRAME_CAPTURE.
19563	FrameCaptureSettings *FrameCaptureSettings `locationName:"frameCaptureSettings" type:"structure"`
19564
19565	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19566	// the value H_264.
19567	H264Settings *H264Settings `locationName:"h264Settings" type:"structure"`
19568
19569	// Settings for H265 codec
19570	H265Settings *H265Settings `locationName:"h265Settings" type:"structure"`
19571
19572	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19573	// the value MPEG2.
19574	Mpeg2Settings *Mpeg2Settings `locationName:"mpeg2Settings" type:"structure"`
19575
19576	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19577	// the value PRORES.
19578	ProresSettings *ProresSettings `locationName:"proresSettings" type:"structure"`
19579
19580	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
	// the value VC3.
19582	Vc3Settings *Vc3Settings `locationName:"vc3Settings" type:"structure"`
19583
19584	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19585	// the value VP8.
19586	Vp8Settings *Vp8Settings `locationName:"vp8Settings" type:"structure"`
19587
19588	// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
19589	// the value VP9.
19590	Vp9Settings *Vp9Settings `locationName:"vp9Settings" type:"structure"`
19591}
19592
19593// String returns the string representation
19594func (s VideoCodecSettings) String() string {
19595	return awsutil.Prettify(s)
19596}
19597
19598// GoString returns the string representation
19599func (s VideoCodecSettings) GoString() string {
19600	return s.String()
19601}
19602
19603// Validate inspects the fields of the type to determine if they are valid.
19604func (s *VideoCodecSettings) Validate() error {
19605	invalidParams := request.ErrInvalidParams{Context: "VideoCodecSettings"}
19606	if s.Av1Settings != nil {
19607		if err := s.Av1Settings.Validate(); err != nil {
19608			invalidParams.AddNested("Av1Settings", err.(request.ErrInvalidParams))
19609		}
19610	}
19611	if s.AvcIntraSettings != nil {
19612		if err := s.AvcIntraSettings.Validate(); err != nil {
19613			invalidParams.AddNested("AvcIntraSettings", err.(request.ErrInvalidParams))
19614		}
19615	}
19616	if s.FrameCaptureSettings != nil {
19617		if err := s.FrameCaptureSettings.Validate(); err != nil {
19618			invalidParams.AddNested("FrameCaptureSettings", err.(request.ErrInvalidParams))
19619		}
19620	}
19621	if s.H264Settings != nil {
19622		if err := s.H264Settings.Validate(); err != nil {
19623			invalidParams.AddNested("H264Settings", err.(request.ErrInvalidParams))
19624		}
19625	}
19626	if s.H265Settings != nil {
19627		if err := s.H265Settings.Validate(); err != nil {
19628			invalidParams.AddNested("H265Settings", err.(request.ErrInvalidParams))
19629		}
19630	}
19631	if s.Mpeg2Settings != nil {
19632		if err := s.Mpeg2Settings.Validate(); err != nil {
19633			invalidParams.AddNested("Mpeg2Settings", err.(request.ErrInvalidParams))
19634		}
19635	}
19636	if s.ProresSettings != nil {
19637		if err := s.ProresSettings.Validate(); err != nil {
19638			invalidParams.AddNested("ProresSettings", err.(request.ErrInvalidParams))
19639		}
19640	}
19641	if s.Vc3Settings != nil {
19642		if err := s.Vc3Settings.Validate(); err != nil {
19643			invalidParams.AddNested("Vc3Settings", err.(request.ErrInvalidParams))
19644		}
19645	}
19646	if s.Vp8Settings != nil {
19647		if err := s.Vp8Settings.Validate(); err != nil {
19648			invalidParams.AddNested("Vp8Settings", err.(request.ErrInvalidParams))
19649		}
19650	}
19651	if s.Vp9Settings != nil {
19652		if err := s.Vp9Settings.Validate(); err != nil {
19653			invalidParams.AddNested("Vp9Settings", err.(request.ErrInvalidParams))
19654		}
19655	}
19656
19657	if invalidParams.Len() > 0 {
19658		return invalidParams
19659	}
19660	return nil
19661}
19662
19663// SetAv1Settings sets the Av1Settings field's value.
19664func (s *VideoCodecSettings) SetAv1Settings(v *Av1Settings) *VideoCodecSettings {
19665	s.Av1Settings = v
19666	return s
19667}
19668
19669// SetAvcIntraSettings sets the AvcIntraSettings field's value.
19670func (s *VideoCodecSettings) SetAvcIntraSettings(v *AvcIntraSettings) *VideoCodecSettings {
19671	s.AvcIntraSettings = v
19672	return s
19673}
19674
19675// SetCodec sets the Codec field's value.
19676func (s *VideoCodecSettings) SetCodec(v string) *VideoCodecSettings {
19677	s.Codec = &v
19678	return s
19679}
19680
19681// SetFrameCaptureSettings sets the FrameCaptureSettings field's value.
19682func (s *VideoCodecSettings) SetFrameCaptureSettings(v *FrameCaptureSettings) *VideoCodecSettings {
19683	s.FrameCaptureSettings = v
19684	return s
19685}
19686
19687// SetH264Settings sets the H264Settings field's value.
19688func (s *VideoCodecSettings) SetH264Settings(v *H264Settings) *VideoCodecSettings {
19689	s.H264Settings = v
19690	return s
19691}
19692
19693// SetH265Settings sets the H265Settings field's value.
19694func (s *VideoCodecSettings) SetH265Settings(v *H265Settings) *VideoCodecSettings {
19695	s.H265Settings = v
19696	return s
19697}
19698
19699// SetMpeg2Settings sets the Mpeg2Settings field's value.
19700func (s *VideoCodecSettings) SetMpeg2Settings(v *Mpeg2Settings) *VideoCodecSettings {
19701	s.Mpeg2Settings = v
19702	return s
19703}
19704
19705// SetProresSettings sets the ProresSettings field's value.
19706func (s *VideoCodecSettings) SetProresSettings(v *ProresSettings) *VideoCodecSettings {
19707	s.ProresSettings = v
19708	return s
19709}
19710
19711// SetVc3Settings sets the Vc3Settings field's value.
19712func (s *VideoCodecSettings) SetVc3Settings(v *Vc3Settings) *VideoCodecSettings {
19713	s.Vc3Settings = v
19714	return s
19715}
19716
19717// SetVp8Settings sets the Vp8Settings field's value.
19718func (s *VideoCodecSettings) SetVp8Settings(v *Vp8Settings) *VideoCodecSettings {
19719	s.Vp8Settings = v
19720	return s
19721}
19722
19723// SetVp9Settings sets the Vp9Settings field's value.
19724func (s *VideoCodecSettings) SetVp9Settings(v *Vp9Settings) *VideoCodecSettings {
19725	s.Vp9Settings = v
19726	return s
19727}
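
// A minimal sketch (not generated from the service model) of pairing the codec
// enum with its settings object for an H.264 output; the H264Settings fields are
// left for you to fill in.
//
//    codec := &VideoCodecSettings{}
//    codec.SetCodec("H_264")
//    codec.SetH264Settings(&H264Settings{}) // populate the H.264-specific settings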
19728
19729// Settings for video outputs
19730type VideoDescription struct {
19731	_ struct{} `type:"structure"`
19732
19733	// This setting only applies to H.264, H.265, and MPEG2 outputs. Use Insert
19734	// AFD signaling (AfdSignaling) to specify whether the service includes AFD
19735	// values in the output video data and what those values are. * Choose None
19736	// to remove all AFD values from this output. * Choose Fixed to ignore input
19737	// AFD values and instead encode the value specified in the job. * Choose Auto
19738	// to calculate output AFD values based on the input AFD scaler data.
19739	AfdSignaling *string `locationName:"afdSignaling" type:"string" enum:"AfdSignaling"`
19740
19741	// The anti-alias filter is automatically applied to all outputs. The service
19742	// no longer accepts the value DISABLED for AntiAlias. If you specify that in
19743	// your job, the service will ignore the setting.
19744	AntiAlias *string `locationName:"antiAlias" type:"string" enum:"AntiAlias"`
19745
19746	// Video codec settings, (CodecSettings) under (VideoDescription), contains
19747	// the group of settings related to video encoding. The settings in this group
19748	// vary depending on the value that you choose for Video codec (Codec). For
19749	// each codec enum that you choose, define the corresponding settings object.
19750	// The following lists the codec enum, settings object pairs. * AV1, Av1Settings
19751	// * AVC_INTRA, AvcIntraSettings * FRAME_CAPTURE, FrameCaptureSettings * H_264,
19752	// H264Settings * H_265, H265Settings * MPEG2, Mpeg2Settings * PRORES, ProresSettings
19753	// * VC3, Vc3Settings * VP8, Vp8Settings * VP9, Vp9Settings
19754	CodecSettings *VideoCodecSettings `locationName:"codecSettings" type:"structure"`
19755
19756	// Choose Insert (INSERT) for this setting to include color metadata in this
19757	// output. Choose Ignore (IGNORE) to exclude color metadata from this output.
19758	// If you don't specify a value, the service sets this to Insert by default.
19759	ColorMetadata *string `locationName:"colorMetadata" type:"string" enum:"ColorMetadata"`
19760
19761	// Use Cropping selection (crop) to specify the video area that the service
19762	// will include in the output video frame.
19763	Crop *Rectangle `locationName:"crop" type:"structure"`
19764
19765	// Applies only to 29.97 fps outputs. When this feature is enabled, the service
19766	// will use drop-frame timecode on outputs. If it is not possible to use drop-frame
19767	// timecode, the system will fall back to non-drop-frame. This setting is enabled
19768	// by default when Timecode insertion (TimecodeInsertion) is enabled.
19769	DropFrameTimecode *string `locationName:"dropFrameTimecode" type:"string" enum:"DropFrameTimecode"`
19770
19771	// Applies only if you set AFD Signaling(AfdSignaling) to Fixed (FIXED). Use
19772	// Fixed (FixedAfd) to specify a four-bit AFD value which the service will write
19773	// on all frames of this video output.
19774	FixedAfd *int64 `locationName:"fixedAfd" type:"integer"`
19775
19776	// Use the Height (Height) setting to define the video resolution height for
19777	// this output. Specify in pixels. If you don't provide a value here, the service
19778	// will use the input height.
19779	Height *int64 `locationName:"height" min:"32" type:"integer"`
19780
19781	// Use Selection placement (position) to define the video area in your output
19782	// frame. The area outside of the rectangle that you specify here is black.
19783	Position *Rectangle `locationName:"position" type:"structure"`
19784
19785	// Use Respond to AFD (RespondToAfd) to specify how the service changes the
19786	// video itself in response to AFD values in the input. * Choose Respond to
19787	// clip the input video frame according to the AFD value, input display aspect
19788	// ratio, and output display aspect ratio. * Choose Passthrough to include the
19789	// input AFD values. Do not choose this when AfdSignaling is set to (NONE).
19790	// A preferred implementation of this workflow is to set RespondToAfd to (NONE)
19791	// and set AfdSignaling to (AUTO). * Choose None to remove all input AFD values
19792	// from this output.
19793	RespondToAfd *string `locationName:"respondToAfd" type:"string" enum:"RespondToAfd"`
19794
19795	// Specify how the service handles outputs that have a different aspect ratio
19796	// from the input aspect ratio. Choose Stretch to output (STRETCH_TO_OUTPUT)
19797	// to have the service stretch your video image to fit. Keep the setting Default
19798	// (DEFAULT) to have the service letterbox your video instead. This setting
19799	// overrides any value that you specify for the setting Selection placement
19800	// (position) in this output.
19801	ScalingBehavior *string `locationName:"scalingBehavior" type:"string" enum:"ScalingBehavior"`
19802
	// Use the Sharpness (Sharpness) setting to specify the strength of anti-aliasing.
19804	// This setting changes the width of the anti-alias filter kernel used for scaling.
19805	// Sharpness only applies if your output resolution is different from your input
19806	// resolution. 0 is the softest setting, 100 the sharpest, and 50 recommended
19807	// for most content.
19808	Sharpness *int64 `locationName:"sharpness" type:"integer"`
19809
19810	// Applies only to H.264, H.265, MPEG2, and ProRes outputs. Only enable Timecode
19811	// insertion when the input frame rate is identical to the output frame rate.
19812	// To include timecodes in this output, set Timecode insertion (VideoTimecodeInsertion)
19813	// to PIC_TIMING_SEI. To leave them out, set it to DISABLED. Default is DISABLED.
19814	// When the service inserts timecodes in an output, by default, it uses any
19815	// embedded timecodes from the input. If none are present, the service will
19816	// set the timecode for the first output frame to zero. To change this default
19817	// behavior, adjust the settings under Timecode configuration (TimecodeConfig).
19818	// In the console, these settings are located under Job > Job settings > Timecode
19819	// configuration. Note - Timecode source under input settings (InputTimecodeSource)
19820	// does not affect the timecodes that are inserted in the output. Source under
19821	// Job settings > Timecode configuration (TimecodeSource) does.
19822	TimecodeInsertion *string `locationName:"timecodeInsertion" type:"string" enum:"VideoTimecodeInsertion"`
19823
19824	// Find additional transcoding features under Preprocessors (VideoPreprocessors).
19825	// Enable the features at each output individually. These features are disabled
19826	// by default.
19827	VideoPreprocessors *VideoPreprocessor `locationName:"videoPreprocessors" type:"structure"`
19828
19829	// Use Width (Width) to define the video resolution width, in pixels, for this
19830	// output. If you don't provide a value here, the service will use the input
19831	// width.
19832	Width *int64 `locationName:"width" min:"32" type:"integer"`
19833}
19834
19835// String returns the string representation
19836func (s VideoDescription) String() string {
19837	return awsutil.Prettify(s)
19838}
19839
19840// GoString returns the string representation
19841func (s VideoDescription) GoString() string {
19842	return s.String()
19843}
19844
19845// Validate inspects the fields of the type to determine if they are valid.
19846func (s *VideoDescription) Validate() error {
19847	invalidParams := request.ErrInvalidParams{Context: "VideoDescription"}
19848	if s.Height != nil && *s.Height < 32 {
19849		invalidParams.Add(request.NewErrParamMinValue("Height", 32))
19850	}
19851	if s.Width != nil && *s.Width < 32 {
19852		invalidParams.Add(request.NewErrParamMinValue("Width", 32))
19853	}
19854	if s.CodecSettings != nil {
19855		if err := s.CodecSettings.Validate(); err != nil {
19856			invalidParams.AddNested("CodecSettings", err.(request.ErrInvalidParams))
19857		}
19858	}
19859	if s.Crop != nil {
19860		if err := s.Crop.Validate(); err != nil {
19861			invalidParams.AddNested("Crop", err.(request.ErrInvalidParams))
19862		}
19863	}
19864	if s.Position != nil {
19865		if err := s.Position.Validate(); err != nil {
19866			invalidParams.AddNested("Position", err.(request.ErrInvalidParams))
19867		}
19868	}
19869	if s.VideoPreprocessors != nil {
19870		if err := s.VideoPreprocessors.Validate(); err != nil {
19871			invalidParams.AddNested("VideoPreprocessors", err.(request.ErrInvalidParams))
19872		}
19873	}
19874
19875	if invalidParams.Len() > 0 {
19876		return invalidParams
19877	}
19878	return nil
19879}
19880
19881// SetAfdSignaling sets the AfdSignaling field's value.
19882func (s *VideoDescription) SetAfdSignaling(v string) *VideoDescription {
19883	s.AfdSignaling = &v
19884	return s
19885}
19886
19887// SetAntiAlias sets the AntiAlias field's value.
19888func (s *VideoDescription) SetAntiAlias(v string) *VideoDescription {
19889	s.AntiAlias = &v
19890	return s
19891}
19892
19893// SetCodecSettings sets the CodecSettings field's value.
19894func (s *VideoDescription) SetCodecSettings(v *VideoCodecSettings) *VideoDescription {
19895	s.CodecSettings = v
19896	return s
19897}
19898
19899// SetColorMetadata sets the ColorMetadata field's value.
19900func (s *VideoDescription) SetColorMetadata(v string) *VideoDescription {
19901	s.ColorMetadata = &v
19902	return s
19903}
19904
19905// SetCrop sets the Crop field's value.
19906func (s *VideoDescription) SetCrop(v *Rectangle) *VideoDescription {
19907	s.Crop = v
19908	return s
19909}
19910
19911// SetDropFrameTimecode sets the DropFrameTimecode field's value.
19912func (s *VideoDescription) SetDropFrameTimecode(v string) *VideoDescription {
19913	s.DropFrameTimecode = &v
19914	return s
19915}
19916
19917// SetFixedAfd sets the FixedAfd field's value.
19918func (s *VideoDescription) SetFixedAfd(v int64) *VideoDescription {
19919	s.FixedAfd = &v
19920	return s
19921}
19922
19923// SetHeight sets the Height field's value.
19924func (s *VideoDescription) SetHeight(v int64) *VideoDescription {
19925	s.Height = &v
19926	return s
19927}
19928
19929// SetPosition sets the Position field's value.
19930func (s *VideoDescription) SetPosition(v *Rectangle) *VideoDescription {
19931	s.Position = v
19932	return s
19933}
19934
19935// SetRespondToAfd sets the RespondToAfd field's value.
19936func (s *VideoDescription) SetRespondToAfd(v string) *VideoDescription {
19937	s.RespondToAfd = &v
19938	return s
19939}
19940
19941// SetScalingBehavior sets the ScalingBehavior field's value.
19942func (s *VideoDescription) SetScalingBehavior(v string) *VideoDescription {
19943	s.ScalingBehavior = &v
19944	return s
19945}
19946
19947// SetSharpness sets the Sharpness field's value.
19948func (s *VideoDescription) SetSharpness(v int64) *VideoDescription {
19949	s.Sharpness = &v
19950	return s
19951}
19952
19953// SetTimecodeInsertion sets the TimecodeInsertion field's value.
19954func (s *VideoDescription) SetTimecodeInsertion(v string) *VideoDescription {
19955	s.TimecodeInsertion = &v
19956	return s
19957}
19958
19959// SetVideoPreprocessors sets the VideoPreprocessors field's value.
19960func (s *VideoDescription) SetVideoPreprocessors(v *VideoPreprocessor) *VideoDescription {
19961	s.VideoPreprocessors = v
19962	return s
19963}
19964
19965// SetWidth sets the Width field's value.
19966func (s *VideoDescription) SetWidth(v int64) *VideoDescription {
19967	s.Width = &v
19968	return s
19969}
19970
19971// Contains details about the output's video stream
19972type VideoDetail struct {
19973	_ struct{} `type:"structure"`
19974
19975	// Height in pixels for the output
19976	HeightInPx *int64 `locationName:"heightInPx" type:"integer"`
19977
19978	// Width in pixels for the output
19979	WidthInPx *int64 `locationName:"widthInPx" type:"integer"`
19980}
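
// Example (illustrative sketch): VideoDetail fields are pointers, so check for
// nil before dereferencing when reading job results. The variable detail is a
// hypothetical *VideoDetail taken from a completed job's output details.
//
//    if detail.HeightInPx != nil && detail.WidthInPx != nil {
//        fmt.Printf("output resolution: %dx%d\n", *detail.WidthInPx, *detail.HeightInPx)
//    }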
19981
19982// String returns the string representation
19983func (s VideoDetail) String() string {
19984	return awsutil.Prettify(s)
19985}
19986
19987// GoString returns the string representation
19988func (s VideoDetail) GoString() string {
19989	return s.String()
19990}
19991
19992// SetHeightInPx sets the HeightInPx field's value.
19993func (s *VideoDetail) SetHeightInPx(v int64) *VideoDetail {
19994	s.HeightInPx = &v
19995	return s
19996}
19997
19998// SetWidthInPx sets the WidthInPx field's value.
19999func (s *VideoDetail) SetWidthInPx(v int64) *VideoDetail {
20000	s.WidthInPx = &v
20001	return s
20002}
20003
20004// Find additional transcoding features under Preprocessors (VideoPreprocessors).
20005// Enable the features at each output individually. These features are disabled
20006// by default.
20007type VideoPreprocessor struct {
20008	_ struct{} `type:"structure"`
20009
20010	// Enable the Color corrector (ColorCorrector) feature if necessary. Enable
20011	// or disable this feature for each output individually. This setting is disabled
20012	// by default.
20013	ColorCorrector *ColorCorrector `locationName:"colorCorrector" type:"structure"`
20014
20015	// Use Deinterlacer (Deinterlacer) to produce smoother motion and a clearer
20016	// picture.
20017	Deinterlacer *Deinterlacer `locationName:"deinterlacer" type:"structure"`
20018
20019	// Enable Dolby Vision feature to produce Dolby Vision compatible video output.
20020	DolbyVision *DolbyVision `locationName:"dolbyVision" type:"structure"`
20021
20022	// Enable the Image inserter (ImageInserter) feature to include a graphic overlay
20023	// on your video. Enable or disable this feature for each output individually.
20024	// This setting is disabled by default.
20025	ImageInserter *ImageInserter `locationName:"imageInserter" type:"structure"`
20026
20027	// Enable the Noise reducer (NoiseReducer) feature to remove noise from your
20028	// video output if necessary. Enable or disable this feature for each output
20029	// individually. This setting is disabled by default.
20030	NoiseReducer *NoiseReducer `locationName:"noiseReducer" type:"structure"`
20031
20032	// If you work with a third party video watermarking partner, use the group
20033	// of settings that correspond with your watermarking partner to include watermarks
20034	// in your output.
20035	PartnerWatermarking *PartnerWatermarking `locationName:"partnerWatermarking" type:"structure"`
20036
20037	// Timecode burn-in (TimecodeBurnIn)--Burns the output timecode and specified
20038	// prefix into the output.
20039	TimecodeBurnin *TimecodeBurnin `locationName:"timecodeBurnin" type:"structure"`
20040}
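
// Example (illustrative sketch): enable individual preprocessing features by
// attaching their settings structures to a VideoPreprocessor, then attach that
// to a VideoDescription. The empty Deinterlacer and TimecodeBurnin values are
// placeholders; real jobs would populate their feature-specific fields.
//
//    pre := &mediaconvert.VideoPreprocessor{}
//    pre.SetDeinterlacer(&mediaconvert.Deinterlacer{})
//    pre.SetTimecodeBurnin(&mediaconvert.TimecodeBurnin{})
//
//    videoDesc := &mediaconvert.VideoDescription{}
//    videoDesc.SetVideoPreprocessors(pre)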
20041
20042// String returns the string representation
20043func (s VideoPreprocessor) String() string {
20044	return awsutil.Prettify(s)
20045}
20046
20047// GoString returns the string representation
20048func (s VideoPreprocessor) GoString() string {
20049	return s.String()
20050}
20051
20052// Validate inspects the fields of the type to determine if they are valid.
20053func (s *VideoPreprocessor) Validate() error {
20054	invalidParams := request.ErrInvalidParams{Context: "VideoPreprocessor"}
20055	if s.ColorCorrector != nil {
20056		if err := s.ColorCorrector.Validate(); err != nil {
20057			invalidParams.AddNested("ColorCorrector", err.(request.ErrInvalidParams))
20058		}
20059	}
20060	if s.ImageInserter != nil {
20061		if err := s.ImageInserter.Validate(); err != nil {
20062			invalidParams.AddNested("ImageInserter", err.(request.ErrInvalidParams))
20063		}
20064	}
20065	if s.NoiseReducer != nil {
20066		if err := s.NoiseReducer.Validate(); err != nil {
20067			invalidParams.AddNested("NoiseReducer", err.(request.ErrInvalidParams))
20068		}
20069	}
20070	if s.PartnerWatermarking != nil {
20071		if err := s.PartnerWatermarking.Validate(); err != nil {
20072			invalidParams.AddNested("PartnerWatermarking", err.(request.ErrInvalidParams))
20073		}
20074	}
20075	if s.TimecodeBurnin != nil {
20076		if err := s.TimecodeBurnin.Validate(); err != nil {
20077			invalidParams.AddNested("TimecodeBurnin", err.(request.ErrInvalidParams))
20078		}
20079	}
20080
20081	if invalidParams.Len() > 0 {
20082		return invalidParams
20083	}
20084	return nil
20085}
20086
20087// SetColorCorrector sets the ColorCorrector field's value.
20088func (s *VideoPreprocessor) SetColorCorrector(v *ColorCorrector) *VideoPreprocessor {
20089	s.ColorCorrector = v
20090	return s
20091}
20092
20093// SetDeinterlacer sets the Deinterlacer field's value.
20094func (s *VideoPreprocessor) SetDeinterlacer(v *Deinterlacer) *VideoPreprocessor {
20095	s.Deinterlacer = v
20096	return s
20097}
20098
20099// SetDolbyVision sets the DolbyVision field's value.
20100func (s *VideoPreprocessor) SetDolbyVision(v *DolbyVision) *VideoPreprocessor {
20101	s.DolbyVision = v
20102	return s
20103}
20104
20105// SetImageInserter sets the ImageInserter field's value.
20106func (s *VideoPreprocessor) SetImageInserter(v *ImageInserter) *VideoPreprocessor {
20107	s.ImageInserter = v
20108	return s
20109}
20110
20111// SetNoiseReducer sets the NoiseReducer field's value.
20112func (s *VideoPreprocessor) SetNoiseReducer(v *NoiseReducer) *VideoPreprocessor {
20113	s.NoiseReducer = v
20114	return s
20115}
20116
20117// SetPartnerWatermarking sets the PartnerWatermarking field's value.
20118func (s *VideoPreprocessor) SetPartnerWatermarking(v *PartnerWatermarking) *VideoPreprocessor {
20119	s.PartnerWatermarking = v
20120	return s
20121}
20122
20123// SetTimecodeBurnin sets the TimecodeBurnin field's value.
20124func (s *VideoPreprocessor) SetTimecodeBurnin(v *TimecodeBurnin) *VideoPreprocessor {
20125	s.TimecodeBurnin = v
20126	return s
20127}
20128
20129// Selector for video.
20130type VideoSelector struct {
20131	_ struct{} `type:"structure"`
20132
20133	// Ignore this setting unless this input is a QuickTime animation with an alpha
20134	// channel. Use this setting to create separate Key and Fill outputs. In each
20135	// output, specify which part of the input MediaConvert uses. Leave this setting
20136	// at the default value DISCARD to delete the alpha channel and preserve the
20137	// video. Set it to REMAP_TO_LUMA to delete the video and map the alpha channel
20138	// to the luma channel of your outputs.
20139	AlphaBehavior *string `locationName:"alphaBehavior" type:"string" enum:"AlphaBehavior"`
20140
20141	// If your input video has accurate color space metadata, or if you don't know
20142	// about color space, leave this set to the default value Follow (FOLLOW). The
20143	// service will automatically detect your input color space. If your input video
20144	// has metadata indicating the wrong color space, specify the accurate color
20145	// space here. If your input video is HDR 10 and the SMPTE ST 2086 Mastering
20146	// Display Color Volume static metadata isn't present in your video stream,
20147	// or if that metadata is present but not accurate, choose Force HDR 10 (FORCE_HDR10)
20148	// here and specify correct values in the input HDR 10 metadata (Hdr10Metadata)
20149	// settings. For more information about MediaConvert HDR jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr.
20150	ColorSpace *string `locationName:"colorSpace" type:"string" enum:"ColorSpace"`
20151
20152	// There are two sources for color metadata, the input file and the job input
20153	// settings Color space (ColorSpace) and HDR master display information settings (Hdr10Metadata).
20154	// The Color space usage setting determines which takes precedence. Choose Force
20155	// (FORCE) to use color metadata from the input job settings. If you don't specify
20156	// values for those settings, the service defaults to using metadata from your
20157	// input. FALLBACK - Choose Fallback (FALLBACK) to use color metadata from the
20158	// source when it is present. If there's no color metadata in your input file,
20159	// the service defaults to using values you specify in the input settings.
20160	ColorSpaceUsage *string `locationName:"colorSpaceUsage" type:"string" enum:"ColorSpaceUsage"`
20161
20162	// Use these settings to provide HDR 10 metadata that is missing or inaccurate
20163	// in your input video. Appropriate values vary depending on the input video
20164	// and must be provided by a color grader. The color grader generates these
20165	// values during the HDR 10 mastering process. The valid range for each of these
20166	// settings is 0 to 50,000. Each increment represents 0.00002 in CIE1931 color
20167	// coordinate. Related settings - When you specify these values, you must also
20168	// set Color space (ColorSpace) to HDR 10 (HDR10). To specify whether the
20169	// values you specify here take precedence over the values in the metadata of
20170	// your input file, set Color space usage (ColorSpaceUsage). To specify whether
20171	// color metadata is included in an output, set Color metadata (ColorMetadata).
20172	// For more information about MediaConvert HDR jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr.
20173	Hdr10Metadata *Hdr10Metadata `locationName:"hdr10Metadata" type:"structure"`
20174
20175	// Use PID (Pid) to select specific video data from an input file. Specify this
20176	// value as an integer; the system automatically converts it to the hexadecimal
20177	// value. For example, 257 selects PID 0x101. A PID, or packet identifier, is
20178	// an identifier for a set of data in an MPEG-2 transport stream container.
20179	Pid *int64 `locationName:"pid" min:"1" type:"integer"`
20180
20181	// Selects a specific program from within a multi-program transport stream.
20182	// Note that Quad 4K is not currently supported.
20183	ProgramNumber *int64 `locationName:"programNumber" type:"integer"`
20184
20185	// Use Rotate (InputRotate) to specify how the service rotates your video. You
20186	// can choose automatic rotation or specify a rotation. You can specify a clockwise
20187	// rotation of 0, 90, 180, or 270 degrees. If your input video container is
20188	// .mov or .mp4 and your input has rotation metadata, you can choose Automatic
20189	// to have the service rotate your video according to the rotation specified
20190	// in the metadata. The rotation must be within one degree of 90, 180, or 270
20191	// degrees. If the rotation metadata specifies any other rotation, the service
20192	// will default to no rotation. By default, the service does no rotation, even
20193	// if your input video has rotation metadata. The service doesn't pass through
20194	// rotation metadata.
20195	Rotate *string `locationName:"rotate" type:"string" enum:"InputRotate"`
20196}
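
// Example (illustrative sketch): select a specific program and PID from an
// MPEG-2 transport stream input. The PID value 257 (0x101) mirrors the example
// in the field documentation above; the program number is an assumption.
//
//    sel := &mediaconvert.VideoSelector{}
//    sel.SetPid(257)
//    sel.SetProgramNumber(1)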
20197
20198// String returns the string representation
20199func (s VideoSelector) String() string {
20200	return awsutil.Prettify(s)
20201}
20202
20203// GoString returns the string representation
20204func (s VideoSelector) GoString() string {
20205	return s.String()
20206}
20207
20208// Validate inspects the fields of the type to determine if they are valid.
20209func (s *VideoSelector) Validate() error {
20210	invalidParams := request.ErrInvalidParams{Context: "VideoSelector"}
20211	if s.Pid != nil && *s.Pid < 1 {
20212		invalidParams.Add(request.NewErrParamMinValue("Pid", 1))
20213	}
20214	if s.ProgramNumber != nil && *s.ProgramNumber < -2.147483648e+09 {
20215		invalidParams.Add(request.NewErrParamMinValue("ProgramNumber", -2.147483648e+09))
20216	}
20217
20218	if invalidParams.Len() > 0 {
20219		return invalidParams
20220	}
20221	return nil
20222}
20223
20224// SetAlphaBehavior sets the AlphaBehavior field's value.
20225func (s *VideoSelector) SetAlphaBehavior(v string) *VideoSelector {
20226	s.AlphaBehavior = &v
20227	return s
20228}
20229
20230// SetColorSpace sets the ColorSpace field's value.
20231func (s *VideoSelector) SetColorSpace(v string) *VideoSelector {
20232	s.ColorSpace = &v
20233	return s
20234}
20235
20236// SetColorSpaceUsage sets the ColorSpaceUsage field's value.
20237func (s *VideoSelector) SetColorSpaceUsage(v string) *VideoSelector {
20238	s.ColorSpaceUsage = &v
20239	return s
20240}
20241
20242// SetHdr10Metadata sets the Hdr10Metadata field's value.
20243func (s *VideoSelector) SetHdr10Metadata(v *Hdr10Metadata) *VideoSelector {
20244	s.Hdr10Metadata = v
20245	return s
20246}
20247
20248// SetPid sets the Pid field's value.
20249func (s *VideoSelector) SetPid(v int64) *VideoSelector {
20250	s.Pid = &v
20251	return s
20252}
20253
20254// SetProgramNumber sets the ProgramNumber field's value.
20255func (s *VideoSelector) SetProgramNumber(v int64) *VideoSelector {
20256	s.ProgramNumber = &v
20257	return s
20258}
20259
20260// SetRotate sets the Rotate field's value.
20261func (s *VideoSelector) SetRotate(v string) *VideoSelector {
20262	s.Rotate = &v
20263	return s
20264}
20265
20266// Required when you set Codec, under AudioDescriptions>CodecSettings, to the
20267// value Vorbis.
20268type VorbisSettings struct {
20269	_ struct{} `type:"structure"`
20270
20271	// Optional. Specify the number of channels in this output audio track. Choosing
20272	// Mono on the console gives you 1 output channel; choosing Stereo gives you
20273	// 2. In the API, valid values are 1 and 2. The default value is 2.
20274	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
20275
20276	// Optional. Specify the audio sample rate in Hz. Valid values are 22050, 32000,
20277	// 44100, and 48000. The default value is 48000.
20278	SampleRate *int64 `locationName:"sampleRate" min:"22050" type:"integer"`
20279
20280	// Optional. Specify the variable audio quality of this Vorbis output from -1
20281	// (lowest quality, ~45 kbit/s) to 10 (highest quality, ~500 kbit/s). The default
20282	// value is 4 (~128 kbit/s). Values 5 and 6 are approximately 160 and 192 kbit/s,
20283	// respectively.
20284	VbrQuality *int64 `locationName:"vbrQuality" type:"integer"`
20285}
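
// Example (illustrative sketch): stereo Vorbis audio at 48000 Hz with the
// default VBR quality of 4 (~128 kbit/s), per the field documentation above.
//
//    vorbis := &mediaconvert.VorbisSettings{}
//    vorbis.SetChannels(2)
//    vorbis.SetSampleRate(48000)
//    vorbis.SetVbrQuality(4)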
20286
20287// String returns the string representation
20288func (s VorbisSettings) String() string {
20289	return awsutil.Prettify(s)
20290}
20291
20292// GoString returns the string representation
20293func (s VorbisSettings) GoString() string {
20294	return s.String()
20295}
20296
20297// Validate inspects the fields of the type to determine if they are valid.
20298func (s *VorbisSettings) Validate() error {
20299	invalidParams := request.ErrInvalidParams{Context: "VorbisSettings"}
20300	if s.Channels != nil && *s.Channels < 1 {
20301		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
20302	}
20303	if s.SampleRate != nil && *s.SampleRate < 22050 {
20304		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 22050))
20305	}
20306	if s.VbrQuality != nil && *s.VbrQuality < -1 {
20307		invalidParams.Add(request.NewErrParamMinValue("VbrQuality", -1))
20308	}
20309
20310	if invalidParams.Len() > 0 {
20311		return invalidParams
20312	}
20313	return nil
20314}
20315
20316// SetChannels sets the Channels field's value.
20317func (s *VorbisSettings) SetChannels(v int64) *VorbisSettings {
20318	s.Channels = &v
20319	return s
20320}
20321
20322// SetSampleRate sets the SampleRate field's value.
20323func (s *VorbisSettings) SetSampleRate(v int64) *VorbisSettings {
20324	s.SampleRate = &v
20325	return s
20326}
20327
20328// SetVbrQuality sets the VbrQuality field's value.
20329func (s *VorbisSettings) SetVbrQuality(v int64) *VorbisSettings {
20330	s.VbrQuality = &v
20331	return s
20332}
20333
20334// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
20335// the value VP8.
20336type Vp8Settings struct {
20337	_ struct{} `type:"structure"`
20338
20339	// Target bitrate in bits/second. For example, enter five megabits per second
20340	// as 5000000.
20341	Bitrate *int64 `locationName:"bitrate" min:"1000" type:"integer"`
20342
20343	// If you are using the console, use the Framerate setting to specify the frame
20344	// rate for this output. If you want to keep the same frame rate as the input
20345	// video, choose Follow source. If you want to do frame rate conversion, choose
20346	// a frame rate from the dropdown list or choose Custom. The framerates shown
20347	// in the dropdown list are decimal approximations of fractions. If you choose
20348	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
20349	// job specification as a JSON file without the console, use FramerateControl
20350	// to specify which value the service uses for the frame rate for this output.
20351	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
20352	// from the input. Choose SPECIFIED if you want the service to use the frame
20353	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
20354	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"Vp8FramerateControl"`
20355
20356	// Choose the method that you want MediaConvert to use when increasing or decreasing
20357	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
20358	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
20359	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
20360	// smooth picture, but might introduce undesirable video artifacts. For complex
20361	// frame rate conversions, especially if your source video has already been
20362	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
20363	// motion-compensated interpolation. FrameFormer chooses the best conversion
20364	// method frame by frame. Note that using FrameFormer increases the transcoding
20365	// time and incurs a significant add-on cost.
20366	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"Vp8FramerateConversionAlgorithm"`
20367
20368	// When you use the API for transcode jobs that use frame rate conversion, specify
20369	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
20370	// FramerateDenominator to specify the denominator of this fraction. In this
20371	// example, use 1001 for the value of FramerateDenominator. When you use the
20372	// console for transcode jobs that use frame rate conversion, provide the value
20373	// as a decimal number for Framerate. In this example, specify 23.976.
20374	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
20375
20376	// When you use the API for transcode jobs that use frame rate conversion, specify
20377	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
20378	// FramerateNumerator to specify the numerator of this fraction. In this example,
20379	// use 24000 for the value of FramerateNumerator. When you use the console for
20380	// transcode jobs that use frame rate conversion, provide the value as a decimal
20381	// number for Framerate. In this example, specify 23.976.
20382	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
20383
20384	// GOP Length (keyframe interval) in frames. Must be greater than zero.
20385	GopSize *float64 `locationName:"gopSize" type:"double"`
20386
20387	// Optional. Size of buffer (HRD buffer model) in bits. For example, enter five
20388	// megabits as 5000000.
20389	HrdBufferSize *int64 `locationName:"hrdBufferSize" type:"integer"`
20390
20391	// Ignore this setting unless you set qualityTuningLevel to MULTI_PASS. Optional.
20392	// Specify the maximum bitrate in bits/second. For example, enter five megabits
20393	// per second as 5000000. The default behavior uses twice the target bitrate
20394	// as the maximum bitrate.
20395	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
20396
20397	// Optional. Specify how the service determines the pixel aspect ratio (PAR)
20398	// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
20399	// uses the PAR from your input video for your output. To specify a different
20400	// PAR in the console, choose any value other than Follow source. To specify
20401	// a different PAR by editing the JSON job specification, choose SPECIFIED.
20402	// When you choose SPECIFIED for this setting, you must also specify values
20403	// for the parNumerator and parDenominator settings.
20404	ParControl *string `locationName:"parControl" type:"string" enum:"Vp8ParControl"`
20405
20406	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
20407	// console, this corresponds to any value other than Follow source. When you
20408	// specify an output pixel aspect ratio (PAR) that is different from your input
20409	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
20410	// widescreen, you would specify the ratio 40:33. In this example, the value
20411	// for parDenominator is 33.
20412	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
20413
20414	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
20415	// console, this corresponds to any value other than Follow source. When you
20416	// specify an output pixel aspect ratio (PAR) that is different from your input
20417	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
20418	// widescreen, you would specify the ratio 40:33. In this example, the value
20419	// for parNumerator is 40.
20420	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
20421
20422	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
20423	// want to trade off encoding speed for output video quality. The default behavior
20424	// is faster, lower quality, multi-pass encoding.
20425	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"Vp8QualityTuningLevel"`
20426
20427	// With the VP8 codec, you can use only the variable bitrate (VBR) rate control
20428	// mode.
20429	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"Vp8RateControlMode"`
20430}
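
// Example (illustrative sketch): a VP8 output targeting 5 Mbit/s at 23.976 fps.
// As described in the frame rate fields above, the API expresses the frame rate
// as the fraction 24000/1001 when FramerateControl is SPECIFIED.
//
//    vp8 := &mediaconvert.Vp8Settings{}
//    vp8.SetBitrate(5000000)
//    vp8.SetFramerateControl("SPECIFIED")
//    vp8.SetFramerateNumerator(24000)
//    vp8.SetFramerateDenominator(1001)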
20431
20432// String returns the string representation
20433func (s Vp8Settings) String() string {
20434	return awsutil.Prettify(s)
20435}
20436
20437// GoString returns the string representation
20438func (s Vp8Settings) GoString() string {
20439	return s.String()
20440}
20441
20442// Validate inspects the fields of the type to determine if they are valid.
20443func (s *Vp8Settings) Validate() error {
20444	invalidParams := request.ErrInvalidParams{Context: "Vp8Settings"}
20445	if s.Bitrate != nil && *s.Bitrate < 1000 {
20446		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 1000))
20447	}
20448	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
20449		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
20450	}
20451	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
20452		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
20453	}
20454	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
20455		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
20456	}
20457	if s.ParDenominator != nil && *s.ParDenominator < 1 {
20458		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
20459	}
20460	if s.ParNumerator != nil && *s.ParNumerator < 1 {
20461		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
20462	}
20463
20464	if invalidParams.Len() > 0 {
20465		return invalidParams
20466	}
20467	return nil
20468}
20469
20470// SetBitrate sets the Bitrate field's value.
20471func (s *Vp8Settings) SetBitrate(v int64) *Vp8Settings {
20472	s.Bitrate = &v
20473	return s
20474}
20475
20476// SetFramerateControl sets the FramerateControl field's value.
20477func (s *Vp8Settings) SetFramerateControl(v string) *Vp8Settings {
20478	s.FramerateControl = &v
20479	return s
20480}
20481
20482// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
20483func (s *Vp8Settings) SetFramerateConversionAlgorithm(v string) *Vp8Settings {
20484	s.FramerateConversionAlgorithm = &v
20485	return s
20486}
20487
20488// SetFramerateDenominator sets the FramerateDenominator field's value.
20489func (s *Vp8Settings) SetFramerateDenominator(v int64) *Vp8Settings {
20490	s.FramerateDenominator = &v
20491	return s
20492}
20493
20494// SetFramerateNumerator sets the FramerateNumerator field's value.
20495func (s *Vp8Settings) SetFramerateNumerator(v int64) *Vp8Settings {
20496	s.FramerateNumerator = &v
20497	return s
20498}
20499
20500// SetGopSize sets the GopSize field's value.
20501func (s *Vp8Settings) SetGopSize(v float64) *Vp8Settings {
20502	s.GopSize = &v
20503	return s
20504}
20505
20506// SetHrdBufferSize sets the HrdBufferSize field's value.
20507func (s *Vp8Settings) SetHrdBufferSize(v int64) *Vp8Settings {
20508	s.HrdBufferSize = &v
20509	return s
20510}
20511
20512// SetMaxBitrate sets the MaxBitrate field's value.
20513func (s *Vp8Settings) SetMaxBitrate(v int64) *Vp8Settings {
20514	s.MaxBitrate = &v
20515	return s
20516}
20517
20518// SetParControl sets the ParControl field's value.
20519func (s *Vp8Settings) SetParControl(v string) *Vp8Settings {
20520	s.ParControl = &v
20521	return s
20522}
20523
20524// SetParDenominator sets the ParDenominator field's value.
20525func (s *Vp8Settings) SetParDenominator(v int64) *Vp8Settings {
20526	s.ParDenominator = &v
20527	return s
20528}
20529
20530// SetParNumerator sets the ParNumerator field's value.
20531func (s *Vp8Settings) SetParNumerator(v int64) *Vp8Settings {
20532	s.ParNumerator = &v
20533	return s
20534}
20535
20536// SetQualityTuningLevel sets the QualityTuningLevel field's value.
20537func (s *Vp8Settings) SetQualityTuningLevel(v string) *Vp8Settings {
20538	s.QualityTuningLevel = &v
20539	return s
20540}
20541
20542// SetRateControlMode sets the RateControlMode field's value.
20543func (s *Vp8Settings) SetRateControlMode(v string) *Vp8Settings {
20544	s.RateControlMode = &v
20545	return s
20546}
20547
20548// Required when you set (Codec) under (VideoDescription)>(CodecSettings) to
20549// the value VP9.
20550type Vp9Settings struct {
20551	_ struct{} `type:"structure"`
20552
20553	// Target bitrate in bits/second. For example, enter five megabits per second
20554	// as 5000000.
20555	Bitrate *int64 `locationName:"bitrate" min:"1000" type:"integer"`
20556
20557	// If you are using the console, use the Framerate setting to specify the frame
20558	// rate for this output. If you want to keep the same frame rate as the input
20559	// video, choose Follow source. If you want to do frame rate conversion, choose
20560	// a frame rate from the dropdown list or choose Custom. The framerates shown
20561	// in the dropdown list are decimal approximations of fractions. If you choose
20562	// Custom, specify your frame rate as a fraction. If you are creating your transcoding
20563	// job specification as a JSON file without the console, use FramerateControl
20564	// to specify which value the service uses for the frame rate for this output.
20565	// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
20566	// from the input. Choose SPECIFIED if you want the service to use the frame
20567	// rate you specify in the settings FramerateNumerator and FramerateDenominator.
20568	FramerateControl *string `locationName:"framerateControl" type:"string" enum:"Vp9FramerateControl"`
20569
20570	// Choose the method that you want MediaConvert to use when increasing or decreasing
20571	// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
20572	// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
20573	// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
20574	// smooth picture, but might introduce undesirable video artifacts. For complex
20575	// frame rate conversions, especially if your source video has already been
20576	// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
20577	// motion-compensated interpolation. FrameFormer chooses the best conversion
20578	// method frame by frame. Note that using FrameFormer increases the transcoding
20579	// time and incurs a significant add-on cost.
20580	FramerateConversionAlgorithm *string `locationName:"framerateConversionAlgorithm" type:"string" enum:"Vp9FramerateConversionAlgorithm"`
20581
20582	// When you use the API for transcode jobs that use frame rate conversion, specify
20583	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
20584	// FramerateDenominator to specify the denominator of this fraction. In this
20585	// example, use 1001 for the value of FramerateDenominator. When you use the
20586	// console for transcode jobs that use frame rate conversion, provide the value
20587	// as a decimal number for Framerate. In this example, specify 23.976.
20588	FramerateDenominator *int64 `locationName:"framerateDenominator" min:"1" type:"integer"`
20589
20590	// When you use the API for transcode jobs that use frame rate conversion, specify
20591	// the frame rate as a fraction. For example, 24000 / 1001 = 23.976 fps. Use
20592	// FramerateNumerator to specify the numerator of this fraction. In this example,
20593	// use 24000 for the value of FramerateNumerator. When you use the console for
20594	// transcode jobs that use frame rate conversion, provide the value as a decimal
20595	// number for Framerate. In this example, specify 23.976.
20596	FramerateNumerator *int64 `locationName:"framerateNumerator" min:"1" type:"integer"`
20597
20598	// GOP Length (keyframe interval) in frames. Must be greater than zero.
20599	GopSize *float64 `locationName:"gopSize" type:"double"`
20600
20601	// Size of buffer (HRD buffer model) in bits. For example, enter five megabits
20602	// as 5000000.
20603	HrdBufferSize *int64 `locationName:"hrdBufferSize" type:"integer"`
20604
20605	// Ignore this setting unless you set qualityTuningLevel to MULTI_PASS. Optional.
20606	// Specify the maximum bitrate in bits/second. For example, enter five megabits
20607	// per second as 5000000. The default behavior uses twice the target bitrate
20608	// as the maximum bitrate.
20609	MaxBitrate *int64 `locationName:"maxBitrate" min:"1000" type:"integer"`
20610
20611	// Optional. Specify how the service determines the pixel aspect ratio for this
20612	// output. The default behavior is to use the same pixel aspect ratio as your
20613	// input video.
20614	ParControl *string `locationName:"parControl" type:"string" enum:"Vp9ParControl"`
20615
20616	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
20617	// console, this corresponds to any value other than Follow source. When you
20618	// specify an output pixel aspect ratio (PAR) that is different from your input
20619	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
20620	// widescreen, you would specify the ratio 40:33. In this example, the value
20621	// for parDenominator is 33.
20622	ParDenominator *int64 `locationName:"parDenominator" min:"1" type:"integer"`
20623
20624	// Required when you set Pixel aspect ratio (parControl) to SPECIFIED. On the
20625	// console, this corresponds to any value other than Follow source. When you
20626	// specify an output pixel aspect ratio (PAR) that is different from your input
20627	// video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC
20628	// widescreen, you would specify the ratio 40:33. In this example, the value
20629	// for parNumerator is 40.
20630	ParNumerator *int64 `locationName:"parNumerator" min:"1" type:"integer"`
20631
20632	// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
20633	// want to trade off encoding speed for output video quality. The default behavior
20634	// is faster, lower quality, multi-pass encoding.
20635	QualityTuningLevel *string `locationName:"qualityTuningLevel" type:"string" enum:"Vp9QualityTuningLevel"`
20636
20637	// With the VP9 codec, you can use only the variable bitrate (VBR) rate control
20638	// mode.
20639	RateControlMode *string `locationName:"rateControlMode" type:"string" enum:"Vp9RateControlMode"`
20640}
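
// Example (illustrative sketch): a VP9 output that caps the bitrate. Per the
// maxBitrate note above, MaxBitrate only applies when qualityTuningLevel is
// MULTI_PASS; the specific numbers are assumptions.
//
//    vp9 := &mediaconvert.Vp9Settings{}
//    vp9.SetBitrate(3000000)
//    vp9.SetQualityTuningLevel("MULTI_PASS")
//    vp9.SetMaxBitrate(6000000)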
20641
20642// String returns the string representation
20643func (s Vp9Settings) String() string {
20644	return awsutil.Prettify(s)
20645}
20646
20647// GoString returns the string representation
20648func (s Vp9Settings) GoString() string {
20649	return s.String()
20650}
20651
20652// Validate inspects the fields of the type to determine if they are valid.
20653func (s *Vp9Settings) Validate() error {
20654	invalidParams := request.ErrInvalidParams{Context: "Vp9Settings"}
20655	if s.Bitrate != nil && *s.Bitrate < 1000 {
20656		invalidParams.Add(request.NewErrParamMinValue("Bitrate", 1000))
20657	}
20658	if s.FramerateDenominator != nil && *s.FramerateDenominator < 1 {
20659		invalidParams.Add(request.NewErrParamMinValue("FramerateDenominator", 1))
20660	}
20661	if s.FramerateNumerator != nil && *s.FramerateNumerator < 1 {
20662		invalidParams.Add(request.NewErrParamMinValue("FramerateNumerator", 1))
20663	}
20664	if s.MaxBitrate != nil && *s.MaxBitrate < 1000 {
20665		invalidParams.Add(request.NewErrParamMinValue("MaxBitrate", 1000))
20666	}
20667	if s.ParDenominator != nil && *s.ParDenominator < 1 {
20668		invalidParams.Add(request.NewErrParamMinValue("ParDenominator", 1))
20669	}
20670	if s.ParNumerator != nil && *s.ParNumerator < 1 {
20671		invalidParams.Add(request.NewErrParamMinValue("ParNumerator", 1))
20672	}
20673
20674	if invalidParams.Len() > 0 {
20675		return invalidParams
20676	}
20677	return nil
20678}
20679
20680// SetBitrate sets the Bitrate field's value.
20681func (s *Vp9Settings) SetBitrate(v int64) *Vp9Settings {
20682	s.Bitrate = &v
20683	return s
20684}
20685
20686// SetFramerateControl sets the FramerateControl field's value.
20687func (s *Vp9Settings) SetFramerateControl(v string) *Vp9Settings {
20688	s.FramerateControl = &v
20689	return s
20690}
20691
20692// SetFramerateConversionAlgorithm sets the FramerateConversionAlgorithm field's value.
20693func (s *Vp9Settings) SetFramerateConversionAlgorithm(v string) *Vp9Settings {
20694	s.FramerateConversionAlgorithm = &v
20695	return s
20696}
20697
20698// SetFramerateDenominator sets the FramerateDenominator field's value.
20699func (s *Vp9Settings) SetFramerateDenominator(v int64) *Vp9Settings {
20700	s.FramerateDenominator = &v
20701	return s
20702}
20703
20704// SetFramerateNumerator sets the FramerateNumerator field's value.
20705func (s *Vp9Settings) SetFramerateNumerator(v int64) *Vp9Settings {
20706	s.FramerateNumerator = &v
20707	return s
20708}
20709
20710// SetGopSize sets the GopSize field's value.
20711func (s *Vp9Settings) SetGopSize(v float64) *Vp9Settings {
20712	s.GopSize = &v
20713	return s
20714}
20715
20716// SetHrdBufferSize sets the HrdBufferSize field's value.
20717func (s *Vp9Settings) SetHrdBufferSize(v int64) *Vp9Settings {
20718	s.HrdBufferSize = &v
20719	return s
20720}
20721
20722// SetMaxBitrate sets the MaxBitrate field's value.
20723func (s *Vp9Settings) SetMaxBitrate(v int64) *Vp9Settings {
20724	s.MaxBitrate = &v
20725	return s
20726}
20727
20728// SetParControl sets the ParControl field's value.
20729func (s *Vp9Settings) SetParControl(v string) *Vp9Settings {
20730	s.ParControl = &v
20731	return s
20732}
20733
20734// SetParDenominator sets the ParDenominator field's value.
20735func (s *Vp9Settings) SetParDenominator(v int64) *Vp9Settings {
20736	s.ParDenominator = &v
20737	return s
20738}
20739
20740// SetParNumerator sets the ParNumerator field's value.
20741func (s *Vp9Settings) SetParNumerator(v int64) *Vp9Settings {
20742	s.ParNumerator = &v
20743	return s
20744}
20745
20746// SetQualityTuningLevel sets the QualityTuningLevel field's value.
20747func (s *Vp9Settings) SetQualityTuningLevel(v string) *Vp9Settings {
20748	s.QualityTuningLevel = &v
20749	return s
20750}
20751
20752// SetRateControlMode sets the RateControlMode field's value.
20753func (s *Vp9Settings) SetRateControlMode(v string) *Vp9Settings {
20754	s.RateControlMode = &v
20755	return s
20756}
20757
20758// Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to
20759// the value WAV.
20760type WavSettings struct {
20761	_ struct{} `type:"structure"`
20762
20763	// Specify Bit depth (BitDepth), in bits per sample, to choose the encoding
20764	// quality for this audio track.
20765	BitDepth *int64 `locationName:"bitDepth" min:"16" type:"integer"`
20766
20767	// Specify the number of channels in this output audio track. Valid values are
20768	// 1 and even numbers up to 64. For example, 1, 2, 4, 6, and so on, up to 64.
20769	Channels *int64 `locationName:"channels" min:"1" type:"integer"`
20770
20771	// The service defaults to using RIFF for WAV outputs. If your output audio
20772	// is likely to exceed 4 GB in file size, or if you otherwise need the extended
20773	// support of the RF64 format, set your output WAV file format to RF64.
20774	Format *string `locationName:"format" type:"string" enum:"WavFormat"`
20775
20776	// Sample rate in Hz.
20777	SampleRate *int64 `locationName:"sampleRate" min:"8000" type:"integer"`
20778}
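
// Example (illustrative sketch): 16-bit stereo PCM at 48000 Hz. RF64 is chosen
// here only to illustrate the large-file option described above; RIFF is the
// service default.
//
//    wav := &mediaconvert.WavSettings{}
//    wav.SetBitDepth(16)
//    wav.SetChannels(2)
//    wav.SetSampleRate(48000)
//    wav.SetFormat("RF64")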
20779
20780// String returns the string representation
20781func (s WavSettings) String() string {
20782	return awsutil.Prettify(s)
20783}
20784
20785// GoString returns the string representation
20786func (s WavSettings) GoString() string {
20787	return s.String()
20788}
20789
20790// Validate inspects the fields of the type to determine if they are valid.
20791func (s *WavSettings) Validate() error {
20792	invalidParams := request.ErrInvalidParams{Context: "WavSettings"}
20793	if s.BitDepth != nil && *s.BitDepth < 16 {
20794		invalidParams.Add(request.NewErrParamMinValue("BitDepth", 16))
20795	}
20796	if s.Channels != nil && *s.Channels < 1 {
20797		invalidParams.Add(request.NewErrParamMinValue("Channels", 1))
20798	}
20799	if s.SampleRate != nil && *s.SampleRate < 8000 {
20800		invalidParams.Add(request.NewErrParamMinValue("SampleRate", 8000))
20801	}
20802
20803	if invalidParams.Len() > 0 {
20804		return invalidParams
20805	}
20806	return nil
20807}
20808
20809// SetBitDepth sets the BitDepth field's value.
20810func (s *WavSettings) SetBitDepth(v int64) *WavSettings {
20811	s.BitDepth = &v
20812	return s
20813}
20814
20815// SetChannels sets the Channels field's value.
20816func (s *WavSettings) SetChannels(v int64) *WavSettings {
20817	s.Channels = &v
20818	return s
20819}
20820
20821// SetFormat sets the Format field's value.
20822func (s *WavSettings) SetFormat(v string) *WavSettings {
20823	s.Format = &v
20824	return s
20825}
20826
20827// SetSampleRate sets the SampleRate field's value.
20828func (s *WavSettings) SetSampleRate(v int64) *WavSettings {
20829	s.SampleRate = &v
20830	return s
20831}
20832
20833// Choose BROADCASTER_MIXED_AD when the input contains pre-mixed main audio
20834// + audio description (AD) as a stereo pair. The value for AudioType will be
20835// set to 3, which signals to downstream systems that this stream contains "broadcaster
20836// mixed AD". Note that the input received by the encoder must contain pre-mixed
20837// audio; the encoder does not perform the mixing. When you choose BROADCASTER_MIXED_AD,
20838// the encoder ignores any values you provide in AudioType and FollowInputAudioType.
20839// Choose NORMAL when the input does not contain pre-mixed audio + audio description
20840// (AD). In this case, the encoder will use any values you provide for AudioType
20841// and FollowInputAudioType.
20842const (
20843	// AacAudioDescriptionBroadcasterMixBroadcasterMixedAd is a AacAudioDescriptionBroadcasterMix enum value
20844	AacAudioDescriptionBroadcasterMixBroadcasterMixedAd = "BROADCASTER_MIXED_AD"
20845
20846	// AacAudioDescriptionBroadcasterMixNormal is a AacAudioDescriptionBroadcasterMix enum value
20847	AacAudioDescriptionBroadcasterMixNormal = "NORMAL"
20848)
20849
20850// AacAudioDescriptionBroadcasterMix_Values returns all elements of the AacAudioDescriptionBroadcasterMix enum
20851func AacAudioDescriptionBroadcasterMix_Values() []string {
20852	return []string{
20853		AacAudioDescriptionBroadcasterMixBroadcasterMixedAd,
20854		AacAudioDescriptionBroadcasterMixNormal,
20855	}
20856}
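
// Example (illustrative sketch): every enum in this package exposes a *_Values
// helper that returns all of its members, which can be used to validate a
// user-supplied string before building job settings.
//
//    func isValidBroadcasterMix(v string) bool {
//        for _, m := range mediaconvert.AacAudioDescriptionBroadcasterMix_Values() {
//            if m == v {
//                return true
//            }
//        }
//        return false
//    }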
20857
20858// AAC Profile.
20859const (
20860	// AacCodecProfileLc is a AacCodecProfile enum value
20861	AacCodecProfileLc = "LC"
20862
20863	// AacCodecProfileHev1 is a AacCodecProfile enum value
20864	AacCodecProfileHev1 = "HEV1"
20865
20866	// AacCodecProfileHev2 is a AacCodecProfile enum value
20867	AacCodecProfileHev2 = "HEV2"
20868)
20869
20870// AacCodecProfile_Values returns all elements of the AacCodecProfile enum
20871func AacCodecProfile_Values() []string {
20872	return []string{
20873		AacCodecProfileLc,
20874		AacCodecProfileHev1,
20875		AacCodecProfileHev2,
20876	}
20877}
20878
20879// Mono (Audio Description), Mono, Stereo, or 5.1 channel layout. Valid values
20880// depend on rate control mode and profile. "1.0 - Audio Description (Receiver
20881// Mix)" setting receives a stereo description plus control track and emits
20882// a mono AAC encode of the description track, with control data emitted in
20883// the PES header as per ETSI TS 101 154 Annex E.
20884const (
20885	// AacCodingModeAdReceiverMix is a AacCodingMode enum value
20886	AacCodingModeAdReceiverMix = "AD_RECEIVER_MIX"
20887
20888	// AacCodingModeCodingMode10 is a AacCodingMode enum value
20889	AacCodingModeCodingMode10 = "CODING_MODE_1_0"
20890
20891	// AacCodingModeCodingMode11 is a AacCodingMode enum value
20892	AacCodingModeCodingMode11 = "CODING_MODE_1_1"
20893
20894	// AacCodingModeCodingMode20 is a AacCodingMode enum value
20895	AacCodingModeCodingMode20 = "CODING_MODE_2_0"
20896
20897	// AacCodingModeCodingMode51 is a AacCodingMode enum value
20898	AacCodingModeCodingMode51 = "CODING_MODE_5_1"
20899)
20900
20901// AacCodingMode_Values returns all elements of the AacCodingMode enum
20902func AacCodingMode_Values() []string {
20903	return []string{
20904		AacCodingModeAdReceiverMix,
20905		AacCodingModeCodingMode10,
20906		AacCodingModeCodingMode11,
20907		AacCodingModeCodingMode20,
20908		AacCodingModeCodingMode51,
20909	}
20910}
20911
20912// Rate Control Mode.
20913const (
20914	// AacRateControlModeCbr is a AacRateControlMode enum value
20915	AacRateControlModeCbr = "CBR"
20916
20917	// AacRateControlModeVbr is a AacRateControlMode enum value
20918	AacRateControlModeVbr = "VBR"
20919)
20920
20921// AacRateControlMode_Values returns all elements of the AacRateControlMode enum
20922func AacRateControlMode_Values() []string {
20923	return []string{
20924		AacRateControlModeCbr,
20925		AacRateControlModeVbr,
20926	}
20927}
20928
20929// Enables LATM/LOAS AAC output. Note that if you use LATM/LOAS AAC in an output,
20930// you must choose "No container" for the output container.
20931const (
20932	// AacRawFormatLatmLoas is a AacRawFormat enum value
20933	AacRawFormatLatmLoas = "LATM_LOAS"
20934
20935	// AacRawFormatNone is a AacRawFormat enum value
20936	AacRawFormatNone = "NONE"
20937)
20938
20939// AacRawFormat_Values returns all elements of the AacRawFormat enum
20940func AacRawFormat_Values() []string {
20941	return []string{
20942		AacRawFormatLatmLoas,
20943		AacRawFormatNone,
20944	}
20945}
20946
20947// Use MPEG-2 AAC instead of MPEG-4 AAC audio for raw or MPEG-2 Transport Stream
20948// containers.
20949const (
20950	// AacSpecificationMpeg2 is a AacSpecification enum value
20951	AacSpecificationMpeg2 = "MPEG2"
20952
20953	// AacSpecificationMpeg4 is a AacSpecification enum value
20954	AacSpecificationMpeg4 = "MPEG4"
20955)
20956
20957// AacSpecification_Values returns all elements of the AacSpecification enum
20958func AacSpecification_Values() []string {
20959	return []string{
20960		AacSpecificationMpeg2,
20961		AacSpecificationMpeg4,
20962	}
20963}
20964
20965// VBR Quality Level - Only used if rate_control_mode is VBR.
20966const (
20967	// AacVbrQualityLow is a AacVbrQuality enum value
20968	AacVbrQualityLow = "LOW"
20969
20970	// AacVbrQualityMediumLow is a AacVbrQuality enum value
20971	AacVbrQualityMediumLow = "MEDIUM_LOW"
20972
20973	// AacVbrQualityMediumHigh is a AacVbrQuality enum value
20974	AacVbrQualityMediumHigh = "MEDIUM_HIGH"
20975
20976	// AacVbrQualityHigh is a AacVbrQuality enum value
20977	AacVbrQualityHigh = "HIGH"
20978)
20979
20980// AacVbrQuality_Values returns all elements of the AacVbrQuality enum
20981func AacVbrQuality_Values() []string {
20982	return []string{
20983		AacVbrQualityLow,
20984		AacVbrQualityMediumLow,
20985		AacVbrQualityMediumHigh,
20986		AacVbrQualityHigh,
20987	}
20988}
20989
20990// Specify the bitstream mode for the AC-3 stream that the encoder emits. For
20991// more information about the AC3 bitstream mode, see ATSC A/52-2012 (Annex
20992// E).
20993const (
20994	// Ac3BitstreamModeCompleteMain is a Ac3BitstreamMode enum value
20995	Ac3BitstreamModeCompleteMain = "COMPLETE_MAIN"
20996
20997	// Ac3BitstreamModeCommentary is a Ac3BitstreamMode enum value
20998	Ac3BitstreamModeCommentary = "COMMENTARY"
20999
21000	// Ac3BitstreamModeDialogue is a Ac3BitstreamMode enum value
21001	Ac3BitstreamModeDialogue = "DIALOGUE"
21002
21003	// Ac3BitstreamModeEmergency is a Ac3BitstreamMode enum value
21004	Ac3BitstreamModeEmergency = "EMERGENCY"
21005
21006	// Ac3BitstreamModeHearingImpaired is a Ac3BitstreamMode enum value
21007	Ac3BitstreamModeHearingImpaired = "HEARING_IMPAIRED"
21008
21009	// Ac3BitstreamModeMusicAndEffects is a Ac3BitstreamMode enum value
21010	Ac3BitstreamModeMusicAndEffects = "MUSIC_AND_EFFECTS"
21011
21012	// Ac3BitstreamModeVisuallyImpaired is a Ac3BitstreamMode enum value
21013	Ac3BitstreamModeVisuallyImpaired = "VISUALLY_IMPAIRED"
21014
21015	// Ac3BitstreamModeVoiceOver is a Ac3BitstreamMode enum value
21016	Ac3BitstreamModeVoiceOver = "VOICE_OVER"
21017)
21018
21019// Ac3BitstreamMode_Values returns all elements of the Ac3BitstreamMode enum
21020func Ac3BitstreamMode_Values() []string {
21021	return []string{
21022		Ac3BitstreamModeCompleteMain,
21023		Ac3BitstreamModeCommentary,
21024		Ac3BitstreamModeDialogue,
21025		Ac3BitstreamModeEmergency,
21026		Ac3BitstreamModeHearingImpaired,
21027		Ac3BitstreamModeMusicAndEffects,
21028		Ac3BitstreamModeVisuallyImpaired,
21029		Ac3BitstreamModeVoiceOver,
21030	}
21031}
21032
21033// Dolby Digital coding mode. Determines number of channels.
21034const (
21035	// Ac3CodingModeCodingMode10 is a Ac3CodingMode enum value
21036	Ac3CodingModeCodingMode10 = "CODING_MODE_1_0"
21037
21038	// Ac3CodingModeCodingMode11 is a Ac3CodingMode enum value
21039	Ac3CodingModeCodingMode11 = "CODING_MODE_1_1"
21040
21041	// Ac3CodingModeCodingMode20 is a Ac3CodingMode enum value
21042	Ac3CodingModeCodingMode20 = "CODING_MODE_2_0"
21043
21044	// Ac3CodingModeCodingMode32Lfe is a Ac3CodingMode enum value
21045	Ac3CodingModeCodingMode32Lfe = "CODING_MODE_3_2_LFE"
21046)
21047
21048// Ac3CodingMode_Values returns all elements of the Ac3CodingMode enum
21049func Ac3CodingMode_Values() []string {
21050	return []string{
21051		Ac3CodingModeCodingMode10,
21052		Ac3CodingModeCodingMode11,
21053		Ac3CodingModeCodingMode20,
21054		Ac3CodingModeCodingMode32Lfe,
21055	}
21056}
21057
21058// If set to FILM_STANDARD, the service adds dynamic range compression signaling to the
21059// output bitstream as defined in the Dolby Digital specification.
21060const (
21061	// Ac3DynamicRangeCompressionProfileFilmStandard is a Ac3DynamicRangeCompressionProfile enum value
21062	Ac3DynamicRangeCompressionProfileFilmStandard = "FILM_STANDARD"
21063
21064	// Ac3DynamicRangeCompressionProfileNone is a Ac3DynamicRangeCompressionProfile enum value
21065	Ac3DynamicRangeCompressionProfileNone = "NONE"
21066)
21067
21068// Ac3DynamicRangeCompressionProfile_Values returns all elements of the Ac3DynamicRangeCompressionProfile enum
21069func Ac3DynamicRangeCompressionProfile_Values() []string {
21070	return []string{
21071		Ac3DynamicRangeCompressionProfileFilmStandard,
21072		Ac3DynamicRangeCompressionProfileNone,
21073	}
21074}
21075
21076// Applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only
21077// valid with 3_2_LFE coding mode.
21078const (
21079	// Ac3LfeFilterEnabled is a Ac3LfeFilter enum value
21080	Ac3LfeFilterEnabled = "ENABLED"
21081
21082	// Ac3LfeFilterDisabled is a Ac3LfeFilter enum value
21083	Ac3LfeFilterDisabled = "DISABLED"
21084)
21085
21086// Ac3LfeFilter_Values returns all elements of the Ac3LfeFilter enum
21087func Ac3LfeFilter_Values() []string {
21088	return []string{
21089		Ac3LfeFilterEnabled,
21090		Ac3LfeFilterDisabled,
21091	}
21092}
21093
21094// When set to FOLLOW_INPUT, encoder metadata will be sourced from the DD, DD+,
21095// or DolbyE decoder that supplied this audio data. If audio was not supplied
21096// from one of these streams, then the static metadata settings will be used.
21097const (
21098	// Ac3MetadataControlFollowInput is a Ac3MetadataControl enum value
21099	Ac3MetadataControlFollowInput = "FOLLOW_INPUT"
21100
21101	// Ac3MetadataControlUseConfigured is a Ac3MetadataControl enum value
21102	Ac3MetadataControlUseConfigured = "USE_CONFIGURED"
21103)
21104
21105// Ac3MetadataControl_Values returns all elements of the Ac3MetadataControl enum
21106func Ac3MetadataControl_Values() []string {
21107	return []string{
21108		Ac3MetadataControlFollowInput,
21109		Ac3MetadataControlUseConfigured,
21110	}
21111}
21112
21113// Specify whether the service runs your job with accelerated transcoding. Choose
21114// DISABLED if you don't want accelerated transcoding. Choose ENABLED if you
21115// want your job to run with accelerated transcoding and to fail if your input
21116// files or your job settings aren't compatible with accelerated transcoding.
21117// Choose PREFERRED if you want your job to run with accelerated transcoding
21118// if the job is compatible with the feature and to run at standard speed if
21119// it's not.
21120const (
21121	// AccelerationModeDisabled is a AccelerationMode enum value
21122	AccelerationModeDisabled = "DISABLED"
21123
21124	// AccelerationModeEnabled is a AccelerationMode enum value
21125	AccelerationModeEnabled = "ENABLED"
21126
21127	// AccelerationModePreferred is a AccelerationMode enum value
21128	AccelerationModePreferred = "PREFERRED"
21129)
21130
21131// AccelerationMode_Values returns all elements of the AccelerationMode enum
21132func AccelerationMode_Values() []string {
21133	return []string{
21134		AccelerationModeDisabled,
21135		AccelerationModeEnabled,
21136		AccelerationModePreferred,
21137	}
21138}
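
// Example (minimal sketch): select an acceleration mode using the constants
// above. This assumes the AccelerationSettings structure and its SetMode
// setter, which are defined elsewhere in this package.
//
//    accel := &mediaconvert.AccelerationSettings{}
//    accel.SetMode(mediaconvert.AccelerationModePreferred)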
21139
21140// Describes whether the current job is running with accelerated transcoding.
21141// For jobs that have Acceleration (AccelerationMode) set to DISABLED, AccelerationStatus
21142// is always NOT_APPLICABLE. For jobs that have Acceleration (AccelerationMode)
21143// set to ENABLED or PREFERRED, AccelerationStatus is one of the other states.
21144// AccelerationStatus is IN_PROGRESS initially, while the service determines
21145// whether the input files and job settings are compatible with accelerated
21146// transcoding. If they are, AccelerationStatus is ACCELERATED. If your input
21147// files and job settings aren't compatible with accelerated transcoding, the
21148// service either fails your job or runs it without accelerated transcoding,
21149// depending on how you set Acceleration (AccelerationMode). When the service
21150// runs your job without accelerated transcoding, AccelerationStatus is NOT_ACCELERATED.
21151const (
21152	// AccelerationStatusNotApplicable is a AccelerationStatus enum value
21153	AccelerationStatusNotApplicable = "NOT_APPLICABLE"
21154
21155	// AccelerationStatusInProgress is a AccelerationStatus enum value
21156	AccelerationStatusInProgress = "IN_PROGRESS"
21157
21158	// AccelerationStatusAccelerated is a AccelerationStatus enum value
21159	AccelerationStatusAccelerated = "ACCELERATED"
21160
21161	// AccelerationStatusNotAccelerated is a AccelerationStatus enum value
21162	AccelerationStatusNotAccelerated = "NOT_ACCELERATED"
21163)
21164
21165// AccelerationStatus_Values returns all elements of the AccelerationStatus enum
21166func AccelerationStatus_Values() []string {
21167	return []string{
21168		AccelerationStatusNotApplicable,
21169		AccelerationStatusInProgress,
21170		AccelerationStatusAccelerated,
21171		AccelerationStatusNotAccelerated,
21172	}
21173}
21174
21175// This setting only applies to H.264, H.265, and MPEG2 outputs. Use Insert
21176// AFD signaling (AfdSignaling) to specify whether the service includes AFD
21177// values in the output video data and what those values are. * Choose None
21178// to remove all AFD values from this output. * Choose Fixed to ignore input
21179// AFD values and instead encode the value specified in the job. * Choose Auto
21180// to calculate output AFD values based on the input AFD scaler data.
21181const (
21182	// AfdSignalingNone is a AfdSignaling enum value
21183	AfdSignalingNone = "NONE"
21184
21185	// AfdSignalingAuto is a AfdSignaling enum value
21186	AfdSignalingAuto = "AUTO"
21187
21188	// AfdSignalingFixed is a AfdSignaling enum value
21189	AfdSignalingFixed = "FIXED"
21190)
21191
21192// AfdSignaling_Values returns all elements of the AfdSignaling enum
21193func AfdSignaling_Values() []string {
21194	return []string{
21195		AfdSignalingNone,
21196		AfdSignalingAuto,
21197		AfdSignalingFixed,
21198	}
21199}
21200
21201// Ignore this setting unless this input is a QuickTime animation with an alpha
21202// channel. Use this setting to create separate Key and Fill outputs. In each
21203// output, specify which part of the input MediaConvert uses. Leave this setting
21204// at the default value DISCARD to delete the alpha channel and preserve the
21205// video. Set it to REMAP_TO_LUMA to delete the video and map the alpha channel
21206// to the luma channel of your outputs.
21207const (
21208	// AlphaBehaviorDiscard is a AlphaBehavior enum value
21209	AlphaBehaviorDiscard = "DISCARD"
21210
21211	// AlphaBehaviorRemapToLuma is a AlphaBehavior enum value
21212	AlphaBehaviorRemapToLuma = "REMAP_TO_LUMA"
21213)
21214
21215// AlphaBehavior_Values returns all elements of the AlphaBehavior enum
21216func AlphaBehavior_Values() []string {
21217	return []string{
21218		AlphaBehaviorDiscard,
21219		AlphaBehaviorRemapToLuma,
21220	}
21221}
21222
21223// Specify whether this set of input captions appears in your outputs in both
21224// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
21225// the captions data in two ways: it passes the 608 data through using the 608
21226// compatibility bytes fields of the 708 wrapper, and it also translates the
21227// 608 data into 708.
21228const (
21229	// AncillaryConvert608To708Upconvert is a AncillaryConvert608To708 enum value
21230	AncillaryConvert608To708Upconvert = "UPCONVERT"
21231
21232	// AncillaryConvert608To708Disabled is a AncillaryConvert608To708 enum value
21233	AncillaryConvert608To708Disabled = "DISABLED"
21234)
21235
21236// AncillaryConvert608To708_Values returns all elements of the AncillaryConvert608To708 enum
21237func AncillaryConvert608To708_Values() []string {
21238	return []string{
21239		AncillaryConvert608To708Upconvert,
21240		AncillaryConvert608To708Disabled,
21241	}
21242}
21243
21244// By default, the service terminates any unterminated captions at the end of
21245// each input. If you want the caption to continue onto your next input, disable
21246// this setting.
21247const (
21248	// AncillaryTerminateCaptionsEndOfInput is a AncillaryTerminateCaptions enum value
21249	AncillaryTerminateCaptionsEndOfInput = "END_OF_INPUT"
21250
21251	// AncillaryTerminateCaptionsDisabled is a AncillaryTerminateCaptions enum value
21252	AncillaryTerminateCaptionsDisabled = "DISABLED"
21253)
21254
21255// AncillaryTerminateCaptions_Values returns all elements of the AncillaryTerminateCaptions enum
21256func AncillaryTerminateCaptions_Values() []string {
21257	return []string{
21258		AncillaryTerminateCaptionsEndOfInput,
21259		AncillaryTerminateCaptionsDisabled,
21260	}
21261}
21262
21263// The anti-alias filter is automatically applied to all outputs. The service
21264// no longer accepts the value DISABLED for AntiAlias. If you specify that in
21265// your job, the service will ignore the setting.
21266const (
21267	// AntiAliasDisabled is a AntiAlias enum value
21268	AntiAliasDisabled = "DISABLED"
21269
21270	// AntiAliasEnabled is a AntiAlias enum value
21271	AntiAliasEnabled = "ENABLED"
21272)
21273
21274// AntiAlias_Values returns all elements of the AntiAlias enum
21275func AntiAlias_Values() []string {
21276	return []string{
21277		AntiAliasDisabled,
21278		AntiAliasEnabled,
21279	}
21280}
21281
21282// You can add a tag for this mono-channel audio track to mimic its placement
21283// in a multi-channel layout. For example, if this track is the left surround
21284// channel, choose Left surround (LS).
21285const (
21286	// AudioChannelTagL is a AudioChannelTag enum value
21287	AudioChannelTagL = "L"
21288
21289	// AudioChannelTagR is a AudioChannelTag enum value
21290	AudioChannelTagR = "R"
21291
21292	// AudioChannelTagC is a AudioChannelTag enum value
21293	AudioChannelTagC = "C"
21294
21295	// AudioChannelTagLfe is a AudioChannelTag enum value
21296	AudioChannelTagLfe = "LFE"
21297
21298	// AudioChannelTagLs is a AudioChannelTag enum value
21299	AudioChannelTagLs = "LS"
21300
21301	// AudioChannelTagRs is a AudioChannelTag enum value
21302	AudioChannelTagRs = "RS"
21303
21304	// AudioChannelTagLc is a AudioChannelTag enum value
21305	AudioChannelTagLc = "LC"
21306
21307	// AudioChannelTagRc is a AudioChannelTag enum value
21308	AudioChannelTagRc = "RC"
21309
21310	// AudioChannelTagCs is a AudioChannelTag enum value
21311	AudioChannelTagCs = "CS"
21312
21313	// AudioChannelTagLsd is a AudioChannelTag enum value
21314	AudioChannelTagLsd = "LSD"
21315
21316	// AudioChannelTagRsd is a AudioChannelTag enum value
21317	AudioChannelTagRsd = "RSD"
21318
21319	// AudioChannelTagTcs is a AudioChannelTag enum value
21320	AudioChannelTagTcs = "TCS"
21321
21322	// AudioChannelTagVhl is a AudioChannelTag enum value
21323	AudioChannelTagVhl = "VHL"
21324
21325	// AudioChannelTagVhc is a AudioChannelTag enum value
21326	AudioChannelTagVhc = "VHC"
21327
21328	// AudioChannelTagVhr is a AudioChannelTag enum value
21329	AudioChannelTagVhr = "VHR"
21330)
21331
21332// AudioChannelTag_Values returns all elements of the AudioChannelTag enum
21333func AudioChannelTag_Values() []string {
21334	return []string{
21335		AudioChannelTagL,
21336		AudioChannelTagR,
21337		AudioChannelTagC,
21338		AudioChannelTagLfe,
21339		AudioChannelTagLs,
21340		AudioChannelTagRs,
21341		AudioChannelTagLc,
21342		AudioChannelTagRc,
21343		AudioChannelTagCs,
21344		AudioChannelTagLsd,
21345		AudioChannelTagRsd,
21346		AudioChannelTagTcs,
21347		AudioChannelTagVhl,
21348		AudioChannelTagVhc,
21349		AudioChannelTagVhr,
21350	}
21351}
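
// Illustrative sketch (not part of the generated API): a conventional 5.1
// channel order expressed with the AudioChannelTag values above. The ordering
// here is an assumption for illustration, not something the API mandates.
func example51ChannelTags() []string {
	return []string{
		AudioChannelTagL,   // front left
		AudioChannelTagR,   // front right
		AudioChannelTagC,   // center
		AudioChannelTagLfe, // low-frequency effects
		AudioChannelTagLs,  // left surround
		AudioChannelTagRs,  // right surround
	}
}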
21352
21353// Type of Audio codec.
21354const (
21355	// AudioCodecAac is a AudioCodec enum value
21356	AudioCodecAac = "AAC"
21357
21358	// AudioCodecMp2 is a AudioCodec enum value
21359	AudioCodecMp2 = "MP2"
21360
21361	// AudioCodecMp3 is a AudioCodec enum value
21362	AudioCodecMp3 = "MP3"
21363
21364	// AudioCodecWav is a AudioCodec enum value
21365	AudioCodecWav = "WAV"
21366
21367	// AudioCodecAiff is a AudioCodec enum value
21368	AudioCodecAiff = "AIFF"
21369
21370	// AudioCodecAc3 is a AudioCodec enum value
21371	AudioCodecAc3 = "AC3"
21372
21373	// AudioCodecEac3 is a AudioCodec enum value
21374	AudioCodecEac3 = "EAC3"
21375
21376	// AudioCodecEac3Atmos is a AudioCodec enum value
21377	AudioCodecEac3Atmos = "EAC3_ATMOS"
21378
21379	// AudioCodecVorbis is a AudioCodec enum value
21380	AudioCodecVorbis = "VORBIS"
21381
21382	// AudioCodecOpus is a AudioCodec enum value
21383	AudioCodecOpus = "OPUS"
21384
21385	// AudioCodecPassthrough is a AudioCodec enum value
21386	AudioCodecPassthrough = "PASSTHROUGH"
21387)
21388
21389// AudioCodec_Values returns all elements of the AudioCodec enum
21390func AudioCodec_Values() []string {
21391	return []string{
21392		AudioCodecAac,
21393		AudioCodecMp2,
21394		AudioCodecMp3,
21395		AudioCodecWav,
21396		AudioCodecAiff,
21397		AudioCodecAc3,
21398		AudioCodecEac3,
21399		AudioCodecEac3Atmos,
21400		AudioCodecVorbis,
21401		AudioCodecOpus,
21402		AudioCodecPassthrough,
21403	}
21404}
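
// Illustrative sketch (not part of the generated API): selecting an audio codec
// for an output. It assumes the AudioCodecSettings and AacSettings types defined
// elsewhere in this package; the bitrate and sample rate are example values only.
func exampleAacCodecSettings() *AudioCodecSettings {
	return &AudioCodecSettings{
		Codec: aws.String(AudioCodecAac),
		AacSettings: &AacSettings{
			Bitrate:    aws.Int64(96000),
			SampleRate: aws.Int64(48000),
		},
	}
}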
21405
21406// Enable this setting on one audio selector to set it as the default for the
21407// job. The service uses this default for outputs where it can't find the specified
21408// input audio. If you don't set a default, those outputs have no audio.
21409const (
21410	// AudioDefaultSelectionDefault is a AudioDefaultSelection enum value
21411	AudioDefaultSelectionDefault = "DEFAULT"
21412
21413	// AudioDefaultSelectionNotDefault is a AudioDefaultSelection enum value
21414	AudioDefaultSelectionNotDefault = "NOT_DEFAULT"
21415)
21416
21417// AudioDefaultSelection_Values returns all elements of the AudioDefaultSelection enum
21418func AudioDefaultSelection_Values() []string {
21419	return []string{
21420		AudioDefaultSelectionDefault,
21421		AudioDefaultSelectionNotDefault,
21422	}
21423}
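
// Illustrative sketch (not part of the generated API): marking one input audio
// selector as the job default, so outputs that can't find their specified input
// audio still get sound. It assumes the AudioSelector type defined elsewhere in
// this package.
func exampleDefaultAudioSelector() *AudioSelector {
	return &AudioSelector{
		DefaultSelection: aws.String(AudioDefaultSelectionDefault),
	}
}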
21424
21425// Specify which source for language code takes precedence for this audio track.
21426// When you choose Follow input (FOLLOW_INPUT), the service uses the language
// code from the input track if it's present. If there's no language code on
21428// the input track, the service uses the code that you specify in the setting
21429// Language code (languageCode or customLanguageCode). When you choose Use configured
21430// (USE_CONFIGURED), the service uses the language code that you specify.
21431const (
21432	// AudioLanguageCodeControlFollowInput is a AudioLanguageCodeControl enum value
21433	AudioLanguageCodeControlFollowInput = "FOLLOW_INPUT"
21434
21435	// AudioLanguageCodeControlUseConfigured is a AudioLanguageCodeControl enum value
21436	AudioLanguageCodeControlUseConfigured = "USE_CONFIGURED"
21437)
21438
21439// AudioLanguageCodeControl_Values returns all elements of the AudioLanguageCodeControl enum
21440func AudioLanguageCodeControl_Values() []string {
21441	return []string{
21442		AudioLanguageCodeControlFollowInput,
21443		AudioLanguageCodeControlUseConfigured,
21444	}
21445}
21446
21447// Choose one of the following audio normalization algorithms: ITU-R BS.1770-1:
21448// Ungated loudness. A measurement of ungated average loudness for an entire
21449// piece of content, suitable for measurement of short-form content under ATSC
21450// recommendation A/85. Supports up to 5.1 audio channels. ITU-R BS.1770-2:
21451// Gated loudness. A measurement of gated average loudness compliant with the
21452// requirements of EBU-R128. Supports up to 5.1 audio channels. ITU-R BS.1770-3:
21453// Modified peak. The same loudness measurement algorithm as 1770-2, with an
21454// updated true peak measurement. ITU-R BS.1770-4: Higher channel count. Allows
21455// for more audio channels than the other algorithms, including configurations
21456// such as 7.1.
21457const (
21458	// AudioNormalizationAlgorithmItuBs17701 is a AudioNormalizationAlgorithm enum value
21459	AudioNormalizationAlgorithmItuBs17701 = "ITU_BS_1770_1"
21460
21461	// AudioNormalizationAlgorithmItuBs17702 is a AudioNormalizationAlgorithm enum value
21462	AudioNormalizationAlgorithmItuBs17702 = "ITU_BS_1770_2"
21463
21464	// AudioNormalizationAlgorithmItuBs17703 is a AudioNormalizationAlgorithm enum value
21465	AudioNormalizationAlgorithmItuBs17703 = "ITU_BS_1770_3"
21466
21467	// AudioNormalizationAlgorithmItuBs17704 is a AudioNormalizationAlgorithm enum value
21468	AudioNormalizationAlgorithmItuBs17704 = "ITU_BS_1770_4"
21469)
21470
21471// AudioNormalizationAlgorithm_Values returns all elements of the AudioNormalizationAlgorithm enum
21472func AudioNormalizationAlgorithm_Values() []string {
21473	return []string{
21474		AudioNormalizationAlgorithmItuBs17701,
21475		AudioNormalizationAlgorithmItuBs17702,
21476		AudioNormalizationAlgorithmItuBs17703,
21477		AudioNormalizationAlgorithmItuBs17704,
21478	}
21479}
21480
// When enabled, the output audio is corrected using the chosen algorithm. If
21482// disabled, the audio will be measured but not adjusted.
21483const (
21484	// AudioNormalizationAlgorithmControlCorrectAudio is a AudioNormalizationAlgorithmControl enum value
21485	AudioNormalizationAlgorithmControlCorrectAudio = "CORRECT_AUDIO"
21486
21487	// AudioNormalizationAlgorithmControlMeasureOnly is a AudioNormalizationAlgorithmControl enum value
21488	AudioNormalizationAlgorithmControlMeasureOnly = "MEASURE_ONLY"
21489)
21490
21491// AudioNormalizationAlgorithmControl_Values returns all elements of the AudioNormalizationAlgorithmControl enum
21492func AudioNormalizationAlgorithmControl_Values() []string {
21493	return []string{
21494		AudioNormalizationAlgorithmControlCorrectAudio,
21495		AudioNormalizationAlgorithmControlMeasureOnly,
21496	}
21497}
21498
21499// If set to LOG, log each output's audio track loudness to a CSV file.
21500const (
21501	// AudioNormalizationLoudnessLoggingLog is a AudioNormalizationLoudnessLogging enum value
21502	AudioNormalizationLoudnessLoggingLog = "LOG"
21503
21504	// AudioNormalizationLoudnessLoggingDontLog is a AudioNormalizationLoudnessLogging enum value
21505	AudioNormalizationLoudnessLoggingDontLog = "DONT_LOG"
21506)
21507
21508// AudioNormalizationLoudnessLogging_Values returns all elements of the AudioNormalizationLoudnessLogging enum
21509func AudioNormalizationLoudnessLogging_Values() []string {
21510	return []string{
21511		AudioNormalizationLoudnessLoggingLog,
21512		AudioNormalizationLoudnessLoggingDontLog,
21513	}
21514}
21515
21516// If set to TRUE_PEAK, calculate and log the TruePeak for each output's audio
21517// track loudness.
21518const (
21519	// AudioNormalizationPeakCalculationTruePeak is a AudioNormalizationPeakCalculation enum value
21520	AudioNormalizationPeakCalculationTruePeak = "TRUE_PEAK"
21521
21522	// AudioNormalizationPeakCalculationNone is a AudioNormalizationPeakCalculation enum value
21523	AudioNormalizationPeakCalculationNone = "NONE"
21524)
21525
21526// AudioNormalizationPeakCalculation_Values returns all elements of the AudioNormalizationPeakCalculation enum
21527func AudioNormalizationPeakCalculation_Values() []string {
21528	return []string{
21529		AudioNormalizationPeakCalculationTruePeak,
21530		AudioNormalizationPeakCalculationNone,
21531	}
21532}
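
// Illustrative sketch (not part of the generated API): combining the audio
// normalization enums above. It assumes the AudioNormalizationSettings type
// defined elsewhere in this package; the -23 LKFS target is an example value.
func exampleAudioNormalizationSettings() *AudioNormalizationSettings {
	return &AudioNormalizationSettings{
		Algorithm:        aws.String(AudioNormalizationAlgorithmItuBs17704),
		AlgorithmControl: aws.String(AudioNormalizationAlgorithmControlCorrectAudio),
		LoudnessLogging:  aws.String(AudioNormalizationLoudnessLoggingLog),
		PeakCalculation:  aws.String(AudioNormalizationPeakCalculationTruePeak),
		TargetLkfs:       aws.Float64(-23),
	}
}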
21533
21534// Specifies the type of the audio selector.
21535const (
21536	// AudioSelectorTypePid is a AudioSelectorType enum value
21537	AudioSelectorTypePid = "PID"
21538
21539	// AudioSelectorTypeTrack is a AudioSelectorType enum value
21540	AudioSelectorTypeTrack = "TRACK"
21541
21542	// AudioSelectorTypeLanguageCode is a AudioSelectorType enum value
21543	AudioSelectorTypeLanguageCode = "LANGUAGE_CODE"
21544)
21545
21546// AudioSelectorType_Values returns all elements of the AudioSelectorType enum
21547func AudioSelectorType_Values() []string {
21548	return []string{
21549		AudioSelectorTypePid,
21550		AudioSelectorTypeTrack,
21551		AudioSelectorTypeLanguageCode,
21552	}
21553}
21554
// When set to FOLLOW_INPUT, if the input contains an ISO 639 audio_type, then
// that value is passed through to the output. If the input contains no ISO
// 639 audio_type, the value in Audio Type is included in the output. When set
// to USE_CONFIGURED, the value in Audio Type is always included in the output.
// Note that this field and audioType are both ignored if audioDescriptionBroadcasterMix
// is set to BROADCASTER_MIXED_AD.
21560const (
21561	// AudioTypeControlFollowInput is a AudioTypeControl enum value
21562	AudioTypeControlFollowInput = "FOLLOW_INPUT"
21563
21564	// AudioTypeControlUseConfigured is a AudioTypeControl enum value
21565	AudioTypeControlUseConfigured = "USE_CONFIGURED"
21566)
21567
21568// AudioTypeControl_Values returns all elements of the AudioTypeControl enum
21569func AudioTypeControl_Values() []string {
21570	return []string{
21571		AudioTypeControlFollowInput,
21572		AudioTypeControlUseConfigured,
21573	}
21574}
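
// Illustrative sketch (not part of the generated API): an output audio
// description that follows the input for both language code and audio type,
// falling back to the configured values when the input has none. It assumes
// the AudioDescription type defined elsewhere in this package.
func exampleAudioDescription() *AudioDescription {
	return &AudioDescription{
		LanguageCodeControl: aws.String(AudioLanguageCodeControlFollowInput),
		AudioTypeControl:    aws.String(AudioTypeControlFollowInput),
	}
}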
21575
21576// Specify the strength of any adaptive quantization filters that you enable.
21577// The value that you choose here applies to Spatial adaptive quantization (spatialAdaptiveQuantization).
21578const (
21579	// Av1AdaptiveQuantizationOff is a Av1AdaptiveQuantization enum value
21580	Av1AdaptiveQuantizationOff = "OFF"
21581
21582	// Av1AdaptiveQuantizationLow is a Av1AdaptiveQuantization enum value
21583	Av1AdaptiveQuantizationLow = "LOW"
21584
21585	// Av1AdaptiveQuantizationMedium is a Av1AdaptiveQuantization enum value
21586	Av1AdaptiveQuantizationMedium = "MEDIUM"
21587
21588	// Av1AdaptiveQuantizationHigh is a Av1AdaptiveQuantization enum value
21589	Av1AdaptiveQuantizationHigh = "HIGH"
21590
21591	// Av1AdaptiveQuantizationHigher is a Av1AdaptiveQuantization enum value
21592	Av1AdaptiveQuantizationHigher = "HIGHER"
21593
21594	// Av1AdaptiveQuantizationMax is a Av1AdaptiveQuantization enum value
21595	Av1AdaptiveQuantizationMax = "MAX"
21596)
21597
21598// Av1AdaptiveQuantization_Values returns all elements of the Av1AdaptiveQuantization enum
21599func Av1AdaptiveQuantization_Values() []string {
21600	return []string{
21601		Av1AdaptiveQuantizationOff,
21602		Av1AdaptiveQuantizationLow,
21603		Av1AdaptiveQuantizationMedium,
21604		Av1AdaptiveQuantizationHigh,
21605		Av1AdaptiveQuantizationHigher,
21606		Av1AdaptiveQuantizationMax,
21607	}
21608}
21609
21610// If you are using the console, use the Framerate setting to specify the frame
21611// rate for this output. If you want to keep the same frame rate as the input
21612// video, choose Follow source. If you want to do frame rate conversion, choose
21613// a frame rate from the dropdown list or choose Custom. The framerates shown
21614// in the dropdown list are decimal approximations of fractions. If you choose
21615// Custom, specify your frame rate as a fraction. If you are creating your transcoding
21616// job specification as a JSON file without the console, use FramerateControl
21617// to specify which value the service uses for the frame rate for this output.
21618// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
21619// from the input. Choose SPECIFIED if you want the service to use the frame
21620// rate you specify in the settings FramerateNumerator and FramerateDenominator.
21621const (
21622	// Av1FramerateControlInitializeFromSource is a Av1FramerateControl enum value
21623	Av1FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
21624
21625	// Av1FramerateControlSpecified is a Av1FramerateControl enum value
21626	Av1FramerateControlSpecified = "SPECIFIED"
21627)
21628
21629// Av1FramerateControl_Values returns all elements of the Av1FramerateControl enum
21630func Av1FramerateControl_Values() []string {
21631	return []string{
21632		Av1FramerateControlInitializeFromSource,
21633		Av1FramerateControlSpecified,
21634	}
21635}
21636
21637// Choose the method that you want MediaConvert to use when increasing or decreasing
21638// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
21639// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
21640// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
21641// smooth picture, but might introduce undesirable video artifacts. For complex
21642// frame rate conversions, especially if your source video has already been
21643// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
21644// motion-compensated interpolation. FrameFormer chooses the best conversion
21645// method frame by frame. Note that using FrameFormer increases the transcoding
21646// time and incurs a significant add-on cost.
21647const (
21648	// Av1FramerateConversionAlgorithmDuplicateDrop is a Av1FramerateConversionAlgorithm enum value
21649	Av1FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
21650
21651	// Av1FramerateConversionAlgorithmInterpolate is a Av1FramerateConversionAlgorithm enum value
21652	Av1FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
21653
21654	// Av1FramerateConversionAlgorithmFrameformer is a Av1FramerateConversionAlgorithm enum value
21655	Av1FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
21656)
21657
21658// Av1FramerateConversionAlgorithm_Values returns all elements of the Av1FramerateConversionAlgorithm enum
21659func Av1FramerateConversionAlgorithm_Values() []string {
21660	return []string{
21661		Av1FramerateConversionAlgorithmDuplicateDrop,
21662		Av1FramerateConversionAlgorithmInterpolate,
21663		Av1FramerateConversionAlgorithmFrameformer,
21664	}
21665}
21666
// With AV1 outputs, for rate control mode, MediaConvert supports only quality-defined
// variable bitrate (QVBR). You can't use CBR or VBR.
21669const (
21670	// Av1RateControlModeQvbr is a Av1RateControlMode enum value
21671	Av1RateControlModeQvbr = "QVBR"
21672)
21673
21674// Av1RateControlMode_Values returns all elements of the Av1RateControlMode enum
21675func Av1RateControlMode_Values() []string {
21676	return []string{
21677		Av1RateControlModeQvbr,
21678	}
21679}
21680
21681// Keep the default value, Enabled (ENABLED), to adjust quantization within
21682// each frame based on spatial variation of content complexity. When you enable
21683// this feature, the encoder uses fewer bits on areas that can sustain more
21684// distortion with no noticeable visual degradation and uses more bits on areas
21685// where any small distortion will be noticeable. For example, complex textured
21686// blocks are encoded with fewer bits and smooth textured blocks are encoded
21687// with more bits. Enabling this feature will almost always improve your video
21688// quality. Note, though, that this feature doesn't take into account where
21689// the viewer's attention is likely to be. If viewers are likely to be focusing
21690// their attention on a part of the screen with a lot of complex texture, you
21691// might choose to disable this feature. Related setting: When you enable spatial
21692// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
21693// depending on your content. For homogeneous content, such as cartoons and
21694// video games, set it to Low. For content with a wider variety of textures,
21695// set it to High or Higher.
21696const (
21697	// Av1SpatialAdaptiveQuantizationDisabled is a Av1SpatialAdaptiveQuantization enum value
21698	Av1SpatialAdaptiveQuantizationDisabled = "DISABLED"
21699
21700	// Av1SpatialAdaptiveQuantizationEnabled is a Av1SpatialAdaptiveQuantization enum value
21701	Av1SpatialAdaptiveQuantizationEnabled = "ENABLED"
21702)
21703
21704// Av1SpatialAdaptiveQuantization_Values returns all elements of the Av1SpatialAdaptiveQuantization enum
21705func Av1SpatialAdaptiveQuantization_Values() []string {
21706	return []string{
21707		Av1SpatialAdaptiveQuantizationDisabled,
21708		Av1SpatialAdaptiveQuantizationEnabled,
21709	}
21710}
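
// Illustrative sketch (not part of the generated API): AV1 settings that use the
// only supported rate control mode (QVBR) together with the frame rate and
// quantization enums above. It assumes the Av1Settings and Av1QvbrSettings types
// defined elsewhere in this package; the numeric values are examples only.
func exampleAv1Settings() *Av1Settings {
	return &Av1Settings{
		RateControlMode:              aws.String(Av1RateControlModeQvbr),
		QvbrSettings:                 &Av1QvbrSettings{QvbrQualityLevel: aws.Int64(7)},
		MaxBitrate:                   aws.Int64(5000000),
		FramerateControl:             aws.String(Av1FramerateControlInitializeFromSource),
		FramerateConversionAlgorithm: aws.String(Av1FramerateConversionAlgorithmDuplicateDrop),
		AdaptiveQuantization:         aws.String(Av1AdaptiveQuantizationMedium),
		SpatialAdaptiveQuantization:  aws.String(Av1SpatialAdaptiveQuantizationEnabled),
	}
}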
21711
21712// Specify the AVC-Intra class of your output. The AVC-Intra class selection
21713// determines the output video bit rate depending on the frame rate of the output.
21714// Outputs with higher class values have higher bitrates and improved image
21715// quality. Note that for Class 4K/2K, MediaConvert supports only 4:2:2 chroma
21716// subsampling.
21717const (
21718	// AvcIntraClassClass50 is a AvcIntraClass enum value
21719	AvcIntraClassClass50 = "CLASS_50"
21720
21721	// AvcIntraClassClass100 is a AvcIntraClass enum value
21722	AvcIntraClassClass100 = "CLASS_100"
21723
21724	// AvcIntraClassClass200 is a AvcIntraClass enum value
21725	AvcIntraClassClass200 = "CLASS_200"
21726
21727	// AvcIntraClassClass4k2k is a AvcIntraClass enum value
21728	AvcIntraClassClass4k2k = "CLASS_4K_2K"
21729)
21730
21731// AvcIntraClass_Values returns all elements of the AvcIntraClass enum
21732func AvcIntraClass_Values() []string {
21733	return []string{
21734		AvcIntraClassClass50,
21735		AvcIntraClassClass100,
21736		AvcIntraClassClass200,
21737		AvcIntraClassClass4k2k,
21738	}
21739}
21740
21741// If you are using the console, use the Framerate setting to specify the frame
21742// rate for this output. If you want to keep the same frame rate as the input
21743// video, choose Follow source. If you want to do frame rate conversion, choose
21744// a frame rate from the dropdown list or choose Custom. The framerates shown
21745// in the dropdown list are decimal approximations of fractions. If you choose
21746// Custom, specify your frame rate as a fraction. If you are creating your transcoding
21747// job specification as a JSON file without the console, use FramerateControl
21748// to specify which value the service uses for the frame rate for this output.
21749// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
21750// from the input. Choose SPECIFIED if you want the service to use the frame
21751// rate you specify in the settings FramerateNumerator and FramerateDenominator.
21752const (
21753	// AvcIntraFramerateControlInitializeFromSource is a AvcIntraFramerateControl enum value
21754	AvcIntraFramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
21755
21756	// AvcIntraFramerateControlSpecified is a AvcIntraFramerateControl enum value
21757	AvcIntraFramerateControlSpecified = "SPECIFIED"
21758)
21759
21760// AvcIntraFramerateControl_Values returns all elements of the AvcIntraFramerateControl enum
21761func AvcIntraFramerateControl_Values() []string {
21762	return []string{
21763		AvcIntraFramerateControlInitializeFromSource,
21764		AvcIntraFramerateControlSpecified,
21765	}
21766}
21767
21768// Choose the method that you want MediaConvert to use when increasing or decreasing
21769// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
21770// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
21771// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
21772// smooth picture, but might introduce undesirable video artifacts. For complex
21773// frame rate conversions, especially if your source video has already been
21774// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
21775// motion-compensated interpolation. FrameFormer chooses the best conversion
21776// method frame by frame. Note that using FrameFormer increases the transcoding
21777// time and incurs a significant add-on cost.
21778const (
21779	// AvcIntraFramerateConversionAlgorithmDuplicateDrop is a AvcIntraFramerateConversionAlgorithm enum value
21780	AvcIntraFramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
21781
21782	// AvcIntraFramerateConversionAlgorithmInterpolate is a AvcIntraFramerateConversionAlgorithm enum value
21783	AvcIntraFramerateConversionAlgorithmInterpolate = "INTERPOLATE"
21784
21785	// AvcIntraFramerateConversionAlgorithmFrameformer is a AvcIntraFramerateConversionAlgorithm enum value
21786	AvcIntraFramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
21787)
21788
21789// AvcIntraFramerateConversionAlgorithm_Values returns all elements of the AvcIntraFramerateConversionAlgorithm enum
21790func AvcIntraFramerateConversionAlgorithm_Values() []string {
21791	return []string{
21792		AvcIntraFramerateConversionAlgorithmDuplicateDrop,
21793		AvcIntraFramerateConversionAlgorithmInterpolate,
21794		AvcIntraFramerateConversionAlgorithmFrameformer,
21795	}
21796}
21797
21798// Choose the scan line type for the output. Keep the default value, Progressive
21799// (PROGRESSIVE) to create a progressive output, regardless of the scan type
21800// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
21801// to create an output that's interlaced with the same field polarity throughout.
21802// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
21803// to produce outputs with the same field polarity as the source. For jobs that
21804// have multiple inputs, the output field polarity might change over the course
21805// of the output. Follow behavior depends on the input scan type. If the source
21806// is interlaced, the output will be interlaced with the same polarity as the
// source. If the source is progressive, the output will be interlaced with
// top field first or bottom field first, depending on which of the Follow
// options you choose.
21810const (
21811	// AvcIntraInterlaceModeProgressive is a AvcIntraInterlaceMode enum value
21812	AvcIntraInterlaceModeProgressive = "PROGRESSIVE"
21813
21814	// AvcIntraInterlaceModeTopField is a AvcIntraInterlaceMode enum value
21815	AvcIntraInterlaceModeTopField = "TOP_FIELD"
21816
21817	// AvcIntraInterlaceModeBottomField is a AvcIntraInterlaceMode enum value
21818	AvcIntraInterlaceModeBottomField = "BOTTOM_FIELD"
21819
21820	// AvcIntraInterlaceModeFollowTopField is a AvcIntraInterlaceMode enum value
21821	AvcIntraInterlaceModeFollowTopField = "FOLLOW_TOP_FIELD"
21822
21823	// AvcIntraInterlaceModeFollowBottomField is a AvcIntraInterlaceMode enum value
21824	AvcIntraInterlaceModeFollowBottomField = "FOLLOW_BOTTOM_FIELD"
21825)
21826
21827// AvcIntraInterlaceMode_Values returns all elements of the AvcIntraInterlaceMode enum
21828func AvcIntraInterlaceMode_Values() []string {
21829	return []string{
21830		AvcIntraInterlaceModeProgressive,
21831		AvcIntraInterlaceModeTopField,
21832		AvcIntraInterlaceModeBottomField,
21833		AvcIntraInterlaceModeFollowTopField,
21834		AvcIntraInterlaceModeFollowBottomField,
21835	}
21836}
21837
21838// Use this setting for interlaced outputs, when your output frame rate is half
21839// of your input frame rate. In this situation, choose Optimized interlacing
21840// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
21841// case, each progressive frame from the input corresponds to an interlaced
21842// field in the output. Keep the default value, Basic interlacing (INTERLACED),
21843// for all other output frame rates. With basic interlacing, MediaConvert performs
21844// any frame rate conversion first and then interlaces the frames. When you
21845// choose Optimized interlacing and you set your output frame rate to a value
21846// that isn't suitable for optimized interlacing, MediaConvert automatically
21847// falls back to basic interlacing. Required settings: To use optimized interlacing,
21848// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
21849// use optimized interlacing for hard telecine outputs. You must also set Interlace
21850// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
21851const (
21852	// AvcIntraScanTypeConversionModeInterlaced is a AvcIntraScanTypeConversionMode enum value
21853	AvcIntraScanTypeConversionModeInterlaced = "INTERLACED"
21854
21855	// AvcIntraScanTypeConversionModeInterlacedOptimize is a AvcIntraScanTypeConversionMode enum value
21856	AvcIntraScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
21857)
21858
21859// AvcIntraScanTypeConversionMode_Values returns all elements of the AvcIntraScanTypeConversionMode enum
21860func AvcIntraScanTypeConversionMode_Values() []string {
21861	return []string{
21862		AvcIntraScanTypeConversionModeInterlaced,
21863		AvcIntraScanTypeConversionModeInterlacedOptimize,
21864	}
21865}
21866
21867// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
21868// second (fps). Enable slow PAL to create a 25 fps output. When you enable
21869// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
21870// your audio to keep it synchronized with the video. Note that enabling this
21871// setting will slightly reduce the duration of your video. Required settings:
21872// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
21873// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
21874// 1.
21875const (
21876	// AvcIntraSlowPalDisabled is a AvcIntraSlowPal enum value
21877	AvcIntraSlowPalDisabled = "DISABLED"
21878
21879	// AvcIntraSlowPalEnabled is a AvcIntraSlowPal enum value
21880	AvcIntraSlowPalEnabled = "ENABLED"
21881)
21882
21883// AvcIntraSlowPal_Values returns all elements of the AvcIntraSlowPal enum
21884func AvcIntraSlowPal_Values() []string {
21885	return []string{
21886		AvcIntraSlowPalDisabled,
21887		AvcIntraSlowPalEnabled,
21888	}
21889}
21890
21891// When you do frame rate conversion from 23.976 frames per second (fps) to
21892// 29.97 fps, and your output scan type is interlaced, you can optionally enable
21893// hard telecine (HARD) to create a smoother picture. When you keep the default
21894// value, None (NONE), MediaConvert does a standard frame rate conversion to
21895// 29.97 without doing anything with the field polarity to create a smoother
21896// picture.
21897const (
21898	// AvcIntraTelecineNone is a AvcIntraTelecine enum value
21899	AvcIntraTelecineNone = "NONE"
21900
21901	// AvcIntraTelecineHard is a AvcIntraTelecine enum value
21902	AvcIntraTelecineHard = "HARD"
21903)
21904
21905// AvcIntraTelecine_Values returns all elements of the AvcIntraTelecine enum
21906func AvcIntraTelecine_Values() []string {
21907	return []string{
21908		AvcIntraTelecineNone,
21909		AvcIntraTelecineHard,
21910	}
21911}
21912
21913// Optional. Use Quality tuning level (qualityTuningLevel) to choose how many
21914// transcoding passes MediaConvert does with your video. When you choose Multi-pass
21915// (MULTI_PASS), your video quality is better and your output bitrate is more
21916// accurate. That is, the actual bitrate of your output is closer to the target
21917// bitrate defined in the specification. When you choose Single-pass (SINGLE_PASS),
21918// your encoding time is faster. The default behavior is Single-pass (SINGLE_PASS).
21919const (
21920	// AvcIntraUhdQualityTuningLevelSinglePass is a AvcIntraUhdQualityTuningLevel enum value
21921	AvcIntraUhdQualityTuningLevelSinglePass = "SINGLE_PASS"
21922
21923	// AvcIntraUhdQualityTuningLevelMultiPass is a AvcIntraUhdQualityTuningLevel enum value
21924	AvcIntraUhdQualityTuningLevelMultiPass = "MULTI_PASS"
21925)
21926
21927// AvcIntraUhdQualityTuningLevel_Values returns all elements of the AvcIntraUhdQualityTuningLevel enum
21928func AvcIntraUhdQualityTuningLevel_Values() []string {
21929	return []string{
21930		AvcIntraUhdQualityTuningLevelSinglePass,
21931		AvcIntraUhdQualityTuningLevelMultiPass,
21932	}
21933}
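
// Illustrative sketch (not part of the generated API): AVC-Intra settings that
// combine the class, interlace, scan type conversion, telecine, and slow PAL
// enums above. It assumes the AvcIntraSettings type defined elsewhere in this
// package; the field names shown are assumptions based on that type.
func exampleAvcIntraSettings() *AvcIntraSettings {
	return &AvcIntraSettings{
		AvcIntraClass:          aws.String(AvcIntraClassClass100),
		InterlaceMode:          aws.String(AvcIntraInterlaceModeProgressive),
		ScanTypeConversionMode: aws.String(AvcIntraScanTypeConversionModeInterlaced),
		Telecine:               aws.String(AvcIntraTelecineNone),
		SlowPal:                aws.String(AvcIntraSlowPalDisabled),
	}
}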
21934
21935// The tag type that AWS Billing and Cost Management will use to sort your AWS
21936// Elemental MediaConvert costs on any billing report that you set up.
21937const (
21938	// BillingTagsSourceQueue is a BillingTagsSource enum value
21939	BillingTagsSourceQueue = "QUEUE"
21940
21941	// BillingTagsSourcePreset is a BillingTagsSource enum value
21942	BillingTagsSourcePreset = "PRESET"
21943
21944	// BillingTagsSourceJobTemplate is a BillingTagsSource enum value
21945	BillingTagsSourceJobTemplate = "JOB_TEMPLATE"
21946
21947	// BillingTagsSourceJob is a BillingTagsSource enum value
21948	BillingTagsSourceJob = "JOB"
21949)
21950
21951// BillingTagsSource_Values returns all elements of the BillingTagsSource enum
21952func BillingTagsSource_Values() []string {
21953	return []string{
21954		BillingTagsSourceQueue,
21955		BillingTagsSourcePreset,
21956		BillingTagsSourceJobTemplate,
21957		BillingTagsSourceJob,
21958	}
21959}
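
// Illustrative sketch (not part of the generated API): choosing how AWS Billing
// and Cost Management groups this job's cost. It assumes the CreateJobInput type
// defined elsewhere in this package and shows only the relevant field.
func exampleBillingTagsSource(input *CreateJobInput) {
	// Sort MediaConvert costs on the billing report by the queue that ran the job.
	input.BillingTagsSource = aws.String(BillingTagsSourceQueue)
}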
21960
21961// If no explicit x_position or y_position is provided, setting alignment to
21962// centered will place the captions at the bottom center of the output. Similarly,
21963// setting a left alignment will align captions to the bottom left of the output.
21964// If x and y positions are given in conjunction with the alignment parameter,
21965// the font will be justified (either left or centered) relative to those coordinates.
21966// This option is not valid for source captions that are STL, 608/embedded or
21967// teletext. These source settings are already pre-defined by the caption stream.
21968// All burn-in and DVB-Sub font settings must match.
21969const (
21970	// BurninSubtitleAlignmentCentered is a BurninSubtitleAlignment enum value
21971	BurninSubtitleAlignmentCentered = "CENTERED"
21972
21973	// BurninSubtitleAlignmentLeft is a BurninSubtitleAlignment enum value
21974	BurninSubtitleAlignmentLeft = "LEFT"
21975)
21976
21977// BurninSubtitleAlignment_Values returns all elements of the BurninSubtitleAlignment enum
21978func BurninSubtitleAlignment_Values() []string {
21979	return []string{
21980		BurninSubtitleAlignmentCentered,
21981		BurninSubtitleAlignmentLeft,
21982	}
21983}
21984
// Specifies the color of the rectangle behind the captions. All burn-in and
21986// DVB-Sub font settings must match.
21987const (
21988	// BurninSubtitleBackgroundColorNone is a BurninSubtitleBackgroundColor enum value
21989	BurninSubtitleBackgroundColorNone = "NONE"
21990
21991	// BurninSubtitleBackgroundColorBlack is a BurninSubtitleBackgroundColor enum value
21992	BurninSubtitleBackgroundColorBlack = "BLACK"
21993
21994	// BurninSubtitleBackgroundColorWhite is a BurninSubtitleBackgroundColor enum value
21995	BurninSubtitleBackgroundColorWhite = "WHITE"
21996)
21997
21998// BurninSubtitleBackgroundColor_Values returns all elements of the BurninSubtitleBackgroundColor enum
21999func BurninSubtitleBackgroundColor_Values() []string {
22000	return []string{
22001		BurninSubtitleBackgroundColorNone,
22002		BurninSubtitleBackgroundColorBlack,
22003		BurninSubtitleBackgroundColorWhite,
22004	}
22005}
22006
22007// Specifies the color of the burned-in captions. This option is not valid for
22008// source captions that are STL, 608/embedded or teletext. These source settings
22009// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
22010// settings must match.
22011const (
22012	// BurninSubtitleFontColorWhite is a BurninSubtitleFontColor enum value
22013	BurninSubtitleFontColorWhite = "WHITE"
22014
22015	// BurninSubtitleFontColorBlack is a BurninSubtitleFontColor enum value
22016	BurninSubtitleFontColorBlack = "BLACK"
22017
22018	// BurninSubtitleFontColorYellow is a BurninSubtitleFontColor enum value
22019	BurninSubtitleFontColorYellow = "YELLOW"
22020
22021	// BurninSubtitleFontColorRed is a BurninSubtitleFontColor enum value
22022	BurninSubtitleFontColorRed = "RED"
22023
22024	// BurninSubtitleFontColorGreen is a BurninSubtitleFontColor enum value
22025	BurninSubtitleFontColorGreen = "GREEN"
22026
22027	// BurninSubtitleFontColorBlue is a BurninSubtitleFontColor enum value
22028	BurninSubtitleFontColorBlue = "BLUE"
22029)
22030
22031// BurninSubtitleFontColor_Values returns all elements of the BurninSubtitleFontColor enum
22032func BurninSubtitleFontColor_Values() []string {
22033	return []string{
22034		BurninSubtitleFontColorWhite,
22035		BurninSubtitleFontColorBlack,
22036		BurninSubtitleFontColorYellow,
22037		BurninSubtitleFontColorRed,
22038		BurninSubtitleFontColorGreen,
22039		BurninSubtitleFontColorBlue,
22040	}
22041}
22042
22043// Specifies font outline color. This option is not valid for source captions
22044// that are either 608/embedded or teletext. These source settings are already
22045// pre-defined by the caption stream. All burn-in and DVB-Sub font settings
22046// must match.
22047const (
22048	// BurninSubtitleOutlineColorBlack is a BurninSubtitleOutlineColor enum value
22049	BurninSubtitleOutlineColorBlack = "BLACK"
22050
22051	// BurninSubtitleOutlineColorWhite is a BurninSubtitleOutlineColor enum value
22052	BurninSubtitleOutlineColorWhite = "WHITE"
22053
22054	// BurninSubtitleOutlineColorYellow is a BurninSubtitleOutlineColor enum value
22055	BurninSubtitleOutlineColorYellow = "YELLOW"
22056
22057	// BurninSubtitleOutlineColorRed is a BurninSubtitleOutlineColor enum value
22058	BurninSubtitleOutlineColorRed = "RED"
22059
22060	// BurninSubtitleOutlineColorGreen is a BurninSubtitleOutlineColor enum value
22061	BurninSubtitleOutlineColorGreen = "GREEN"
22062
22063	// BurninSubtitleOutlineColorBlue is a BurninSubtitleOutlineColor enum value
22064	BurninSubtitleOutlineColorBlue = "BLUE"
22065)
22066
22067// BurninSubtitleOutlineColor_Values returns all elements of the BurninSubtitleOutlineColor enum
22068func BurninSubtitleOutlineColor_Values() []string {
22069	return []string{
22070		BurninSubtitleOutlineColorBlack,
22071		BurninSubtitleOutlineColorWhite,
22072		BurninSubtitleOutlineColorYellow,
22073		BurninSubtitleOutlineColorRed,
22074		BurninSubtitleOutlineColorGreen,
22075		BurninSubtitleOutlineColorBlue,
22076	}
22077}
22078
// Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub
22080// font settings must match.
22081const (
22082	// BurninSubtitleShadowColorNone is a BurninSubtitleShadowColor enum value
22083	BurninSubtitleShadowColorNone = "NONE"
22084
22085	// BurninSubtitleShadowColorBlack is a BurninSubtitleShadowColor enum value
22086	BurninSubtitleShadowColorBlack = "BLACK"
22087
22088	// BurninSubtitleShadowColorWhite is a BurninSubtitleShadowColor enum value
22089	BurninSubtitleShadowColorWhite = "WHITE"
22090)
22091
22092// BurninSubtitleShadowColor_Values returns all elements of the BurninSubtitleShadowColor enum
22093func BurninSubtitleShadowColor_Values() []string {
22094	return []string{
22095		BurninSubtitleShadowColorNone,
22096		BurninSubtitleShadowColorBlack,
22097		BurninSubtitleShadowColorWhite,
22098	}
22099}
22100
22101// Only applies to jobs with input captions in Teletext or STL formats. Specify
22102// whether the spacing between letters in your captions is set by the captions
22103// grid or varies depending on letter width. Choose fixed grid to conform to
22104// the spacing specified in the captions file more accurately. Choose proportional
// to make the text easier to read if the captions are closed captions.
22106const (
22107	// BurninSubtitleTeletextSpacingFixedGrid is a BurninSubtitleTeletextSpacing enum value
22108	BurninSubtitleTeletextSpacingFixedGrid = "FIXED_GRID"
22109
22110	// BurninSubtitleTeletextSpacingProportional is a BurninSubtitleTeletextSpacing enum value
22111	BurninSubtitleTeletextSpacingProportional = "PROPORTIONAL"
22112)
22113
22114// BurninSubtitleTeletextSpacing_Values returns all elements of the BurninSubtitleTeletextSpacing enum
22115func BurninSubtitleTeletextSpacing_Values() []string {
22116	return []string{
22117		BurninSubtitleTeletextSpacingFixedGrid,
22118		BurninSubtitleTeletextSpacingProportional,
22119	}
22120}
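
// Illustrative sketch (not part of the generated API): burn-in caption styling
// built from the BurninSubtitle* enums above. It assumes the
// BurninDestinationSettings type defined elsewhere in this package; remember
// that all burn-in and DVB-Sub font settings must match.
func exampleBurninDestinationSettings() *BurninDestinationSettings {
	return &BurninDestinationSettings{
		Alignment:       aws.String(BurninSubtitleAlignmentCentered),
		BackgroundColor: aws.String(BurninSubtitleBackgroundColorNone),
		FontColor:       aws.String(BurninSubtitleFontColorWhite),
		OutlineColor:    aws.String(BurninSubtitleOutlineColorBlack),
		ShadowColor:     aws.String(BurninSubtitleShadowColorNone),
		TeletextSpacing: aws.String(BurninSubtitleTeletextSpacingProportional),
	}
}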
22121
22122// Specify the format for this set of captions on this output. The default format
22123// is embedded without SCTE-20. Other options are embedded with SCTE-20, burn-in,
22124// DVB-sub, IMSC, SCC, SRT, teletext, TTML, and web-VTT. If you are using SCTE-20,
22125// choose SCTE-20 plus embedded (SCTE20_PLUS_EMBEDDED) to create an output that
22126// complies with the SCTE-43 spec. To create a non-compliant output where the
22127// embedded captions come first, choose Embedded plus SCTE-20 (EMBEDDED_PLUS_SCTE20).
22128const (
22129	// CaptionDestinationTypeBurnIn is a CaptionDestinationType enum value
22130	CaptionDestinationTypeBurnIn = "BURN_IN"
22131
22132	// CaptionDestinationTypeDvbSub is a CaptionDestinationType enum value
22133	CaptionDestinationTypeDvbSub = "DVB_SUB"
22134
22135	// CaptionDestinationTypeEmbedded is a CaptionDestinationType enum value
22136	CaptionDestinationTypeEmbedded = "EMBEDDED"
22137
22138	// CaptionDestinationTypeEmbeddedPlusScte20 is a CaptionDestinationType enum value
22139	CaptionDestinationTypeEmbeddedPlusScte20 = "EMBEDDED_PLUS_SCTE20"
22140
22141	// CaptionDestinationTypeImsc is a CaptionDestinationType enum value
22142	CaptionDestinationTypeImsc = "IMSC"
22143
22144	// CaptionDestinationTypeScte20PlusEmbedded is a CaptionDestinationType enum value
22145	CaptionDestinationTypeScte20PlusEmbedded = "SCTE20_PLUS_EMBEDDED"
22146
22147	// CaptionDestinationTypeScc is a CaptionDestinationType enum value
22148	CaptionDestinationTypeScc = "SCC"
22149
22150	// CaptionDestinationTypeSrt is a CaptionDestinationType enum value
22151	CaptionDestinationTypeSrt = "SRT"
22152
22153	// CaptionDestinationTypeSmi is a CaptionDestinationType enum value
22154	CaptionDestinationTypeSmi = "SMI"
22155
22156	// CaptionDestinationTypeTeletext is a CaptionDestinationType enum value
22157	CaptionDestinationTypeTeletext = "TELETEXT"
22158
22159	// CaptionDestinationTypeTtml is a CaptionDestinationType enum value
22160	CaptionDestinationTypeTtml = "TTML"
22161
22162	// CaptionDestinationTypeWebvtt is a CaptionDestinationType enum value
22163	CaptionDestinationTypeWebvtt = "WEBVTT"
22164)
22165
22166// CaptionDestinationType_Values returns all elements of the CaptionDestinationType enum
22167func CaptionDestinationType_Values() []string {
22168	return []string{
22169		CaptionDestinationTypeBurnIn,
22170		CaptionDestinationTypeDvbSub,
22171		CaptionDestinationTypeEmbedded,
22172		CaptionDestinationTypeEmbeddedPlusScte20,
22173		CaptionDestinationTypeImsc,
22174		CaptionDestinationTypeScte20PlusEmbedded,
22175		CaptionDestinationTypeScc,
22176		CaptionDestinationTypeSrt,
22177		CaptionDestinationTypeSmi,
22178		CaptionDestinationTypeTeletext,
22179		CaptionDestinationTypeTtml,
22180		CaptionDestinationTypeWebvtt,
22181	}
22182}
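
// Illustrative sketch (not part of the generated API): routing a caption output
// to burn-in. In a real job you would typically also populate the matching
// burn-in styling (sketched earlier). It assumes the CaptionDestinationSettings
// type defined elsewhere in this package.
func exampleCaptionDestinationSettings() *CaptionDestinationSettings {
	return &CaptionDestinationSettings{
		DestinationType: aws.String(CaptionDestinationTypeBurnIn),
	}
}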
22183
22184// Use Source (SourceType) to identify the format of your input captions. The
22185// service cannot auto-detect caption format.
22186const (
22187	// CaptionSourceTypeAncillary is a CaptionSourceType enum value
22188	CaptionSourceTypeAncillary = "ANCILLARY"
22189
22190	// CaptionSourceTypeDvbSub is a CaptionSourceType enum value
22191	CaptionSourceTypeDvbSub = "DVB_SUB"
22192
22193	// CaptionSourceTypeEmbedded is a CaptionSourceType enum value
22194	CaptionSourceTypeEmbedded = "EMBEDDED"
22195
22196	// CaptionSourceTypeScte20 is a CaptionSourceType enum value
22197	CaptionSourceTypeScte20 = "SCTE20"
22198
22199	// CaptionSourceTypeScc is a CaptionSourceType enum value
22200	CaptionSourceTypeScc = "SCC"
22201
22202	// CaptionSourceTypeTtml is a CaptionSourceType enum value
22203	CaptionSourceTypeTtml = "TTML"
22204
22205	// CaptionSourceTypeStl is a CaptionSourceType enum value
22206	CaptionSourceTypeStl = "STL"
22207
22208	// CaptionSourceTypeSrt is a CaptionSourceType enum value
22209	CaptionSourceTypeSrt = "SRT"
22210
22211	// CaptionSourceTypeSmi is a CaptionSourceType enum value
22212	CaptionSourceTypeSmi = "SMI"
22213
22214	// CaptionSourceTypeSmpteTt is a CaptionSourceType enum value
22215	CaptionSourceTypeSmpteTt = "SMPTE_TT"
22216
22217	// CaptionSourceTypeTeletext is a CaptionSourceType enum value
22218	CaptionSourceTypeTeletext = "TELETEXT"
22219
22220	// CaptionSourceTypeNullSource is a CaptionSourceType enum value
22221	CaptionSourceTypeNullSource = "NULL_SOURCE"
22222
22223	// CaptionSourceTypeImsc is a CaptionSourceType enum value
22224	CaptionSourceTypeImsc = "IMSC"
22225)
22226
22227// CaptionSourceType_Values returns all elements of the CaptionSourceType enum
22228func CaptionSourceType_Values() []string {
22229	return []string{
22230		CaptionSourceTypeAncillary,
22231		CaptionSourceTypeDvbSub,
22232		CaptionSourceTypeEmbedded,
22233		CaptionSourceTypeScte20,
22234		CaptionSourceTypeScc,
22235		CaptionSourceTypeTtml,
22236		CaptionSourceTypeStl,
22237		CaptionSourceTypeSrt,
22238		CaptionSourceTypeSmi,
22239		CaptionSourceTypeSmpteTt,
22240		CaptionSourceTypeTeletext,
22241		CaptionSourceTypeNullSource,
22242		CaptionSourceTypeImsc,
22243	}
22244}
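
// Illustrative sketch (not part of the generated API): declaring the format of
// an input caption source, since the service cannot auto-detect it. It assumes
// the CaptionSourceSettings type defined elsewhere in this package.
func exampleCaptionSourceSettings() *CaptionSourceSettings {
	return &CaptionSourceSettings{
		SourceType: aws.String(CaptionSourceTypeEmbedded),
	}
}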
22245
22246// Disable this setting only when your workflow requires the #EXT-X-ALLOW-CACHE:no
22247// tag. Otherwise, keep the default value Enabled (ENABLED) and control caching
// in your video distribution setup. For example, use the Cache-Control HTTP
// header.
22250const (
22251	// CmafClientCacheDisabled is a CmafClientCache enum value
22252	CmafClientCacheDisabled = "DISABLED"
22253
22254	// CmafClientCacheEnabled is a CmafClientCache enum value
22255	CmafClientCacheEnabled = "ENABLED"
22256)
22257
22258// CmafClientCache_Values returns all elements of the CmafClientCache enum
22259func CmafClientCache_Values() []string {
22260	return []string{
22261		CmafClientCacheDisabled,
22262		CmafClientCacheEnabled,
22263	}
22264}
22265
22266// Specification to use (RFC-6381 or the default RFC-4281) during m3u8 playlist
22267// generation.
22268const (
22269	// CmafCodecSpecificationRfc6381 is a CmafCodecSpecification enum value
22270	CmafCodecSpecificationRfc6381 = "RFC_6381"
22271
22272	// CmafCodecSpecificationRfc4281 is a CmafCodecSpecification enum value
22273	CmafCodecSpecificationRfc4281 = "RFC_4281"
22274)
22275
22276// CmafCodecSpecification_Values returns all elements of the CmafCodecSpecification enum
22277func CmafCodecSpecification_Values() []string {
22278	return []string{
22279		CmafCodecSpecificationRfc6381,
22280		CmafCodecSpecificationRfc4281,
22281	}
22282}
22283
22284// Specify the encryption scheme that you want the service to use when encrypting
22285// your CMAF segments. Choose AES-CBC subsample (SAMPLE-AES) or AES_CTR (AES-CTR).
22286const (
22287	// CmafEncryptionTypeSampleAes is a CmafEncryptionType enum value
22288	CmafEncryptionTypeSampleAes = "SAMPLE_AES"
22289
22290	// CmafEncryptionTypeAesCtr is a CmafEncryptionType enum value
22291	CmafEncryptionTypeAesCtr = "AES_CTR"
22292)
22293
22294// CmafEncryptionType_Values returns all elements of the CmafEncryptionType enum
22295func CmafEncryptionType_Values() []string {
22296	return []string{
22297		CmafEncryptionTypeSampleAes,
22298		CmafEncryptionTypeAesCtr,
22299	}
22300}
22301
22302// When you use DRM with CMAF outputs, choose whether the service writes the
22303// 128-bit encryption initialization vector in the HLS and DASH manifests.
22304const (
22305	// CmafInitializationVectorInManifestInclude is a CmafInitializationVectorInManifest enum value
22306	CmafInitializationVectorInManifestInclude = "INCLUDE"
22307
22308	// CmafInitializationVectorInManifestExclude is a CmafInitializationVectorInManifest enum value
22309	CmafInitializationVectorInManifestExclude = "EXCLUDE"
22310)
22311
22312// CmafInitializationVectorInManifest_Values returns all elements of the CmafInitializationVectorInManifest enum
22313func CmafInitializationVectorInManifest_Values() []string {
22314	return []string{
22315		CmafInitializationVectorInManifestInclude,
22316		CmafInitializationVectorInManifestExclude,
22317	}
22318}
22319
22320// Specify whether your DRM encryption key is static or from a key provider
22321// that follows the SPEKE standard. For more information about SPEKE, see https://docs.aws.amazon.com/speke/latest/documentation/what-is-speke.html.
22322const (
22323	// CmafKeyProviderTypeSpeke is a CmafKeyProviderType enum value
22324	CmafKeyProviderTypeSpeke = "SPEKE"
22325
22326	// CmafKeyProviderTypeStaticKey is a CmafKeyProviderType enum value
22327	CmafKeyProviderTypeStaticKey = "STATIC_KEY"
22328)
22329
22330// CmafKeyProviderType_Values returns all elements of the CmafKeyProviderType enum
22331func CmafKeyProviderType_Values() []string {
22332	return []string{
22333		CmafKeyProviderTypeSpeke,
22334		CmafKeyProviderTypeStaticKey,
22335	}
22336}
22337
// When set to GZIP, compresses the HLS playlist.
22339const (
22340	// CmafManifestCompressionGzip is a CmafManifestCompression enum value
22341	CmafManifestCompressionGzip = "GZIP"
22342
22343	// CmafManifestCompressionNone is a CmafManifestCompression enum value
22344	CmafManifestCompressionNone = "NONE"
22345)
22346
22347// CmafManifestCompression_Values returns all elements of the CmafManifestCompression enum
22348func CmafManifestCompression_Values() []string {
22349	return []string{
22350		CmafManifestCompressionGzip,
22351		CmafManifestCompressionNone,
22352	}
22353}
22354
22355// Indicates whether the output manifest should use floating point values for
22356// segment duration.
22357const (
22358	// CmafManifestDurationFormatFloatingPoint is a CmafManifestDurationFormat enum value
22359	CmafManifestDurationFormatFloatingPoint = "FLOATING_POINT"
22360
22361	// CmafManifestDurationFormatInteger is a CmafManifestDurationFormat enum value
22362	CmafManifestDurationFormatInteger = "INTEGER"
22363)
22364
22365// CmafManifestDurationFormat_Values returns all elements of the CmafManifestDurationFormat enum
22366func CmafManifestDurationFormat_Values() []string {
22367	return []string{
22368		CmafManifestDurationFormatFloatingPoint,
22369		CmafManifestDurationFormatInteger,
22370	}
22371}
22372
22373// Specify whether your DASH profile is on-demand or main. When you choose Main
22374// profile (MAIN_PROFILE), the service signals urn:mpeg:dash:profile:isoff-main:2011
22375// in your .mpd DASH manifest. When you choose On-demand (ON_DEMAND_PROFILE),
22376// the service signals urn:mpeg:dash:profile:isoff-on-demand:2011 in your .mpd.
22377// When you choose On-demand, you must also set the output group setting Segment
22378// control (SegmentControl) to Single file (SINGLE_FILE).
22379const (
22380	// CmafMpdProfileMainProfile is a CmafMpdProfile enum value
22381	CmafMpdProfileMainProfile = "MAIN_PROFILE"
22382
22383	// CmafMpdProfileOnDemandProfile is a CmafMpdProfile enum value
22384	CmafMpdProfileOnDemandProfile = "ON_DEMAND_PROFILE"
22385)
22386
22387// CmafMpdProfile_Values returns all elements of the CmafMpdProfile enum
22388func CmafMpdProfile_Values() []string {
22389	return []string{
22390		CmafMpdProfileMainProfile,
22391		CmafMpdProfileOnDemandProfile,
22392	}
22393}
22394
22395// When set to SINGLE_FILE, a single output file is generated, which is internally
22396// segmented using the Fragment Length and Segment Length. When set to SEGMENTED_FILES,
22397// separate segment files will be created.
22398const (
22399	// CmafSegmentControlSingleFile is a CmafSegmentControl enum value
22400	CmafSegmentControlSingleFile = "SINGLE_FILE"
22401
22402	// CmafSegmentControlSegmentedFiles is a CmafSegmentControl enum value
22403	CmafSegmentControlSegmentedFiles = "SEGMENTED_FILES"
22404)
22405
22406// CmafSegmentControl_Values returns all elements of the CmafSegmentControl enum
22407func CmafSegmentControl_Values() []string {
22408	return []string{
22409		CmafSegmentControlSingleFile,
22410		CmafSegmentControlSegmentedFiles,
22411	}
22412}
22413
22414// Include or exclude RESOLUTION attribute for video in EXT-X-STREAM-INF tag
22415// of variant manifest.
22416const (
22417	// CmafStreamInfResolutionInclude is a CmafStreamInfResolution enum value
22418	CmafStreamInfResolutionInclude = "INCLUDE"
22419
22420	// CmafStreamInfResolutionExclude is a CmafStreamInfResolution enum value
22421	CmafStreamInfResolutionExclude = "EXCLUDE"
22422)
22423
22424// CmafStreamInfResolution_Values returns all elements of the CmafStreamInfResolution enum
22425func CmafStreamInfResolution_Values() []string {
22426	return []string{
22427		CmafStreamInfResolutionInclude,
22428		CmafStreamInfResolutionExclude,
22429	}
22430}
22431
22432// When set to ENABLED, a DASH MPD manifest will be generated for this output.
22433const (
22434	// CmafWriteDASHManifestDisabled is a CmafWriteDASHManifest enum value
22435	CmafWriteDASHManifestDisabled = "DISABLED"
22436
22437	// CmafWriteDASHManifestEnabled is a CmafWriteDASHManifest enum value
22438	CmafWriteDASHManifestEnabled = "ENABLED"
22439)
22440
22441// CmafWriteDASHManifest_Values returns all elements of the CmafWriteDASHManifest enum
22442func CmafWriteDASHManifest_Values() []string {
22443	return []string{
22444		CmafWriteDASHManifestDisabled,
22445		CmafWriteDASHManifestEnabled,
22446	}
22447}
22448
22449// When set to ENABLED, an Apple HLS manifest will be generated for this output.
22450const (
22451	// CmafWriteHLSManifestDisabled is a CmafWriteHLSManifest enum value
22452	CmafWriteHLSManifestDisabled = "DISABLED"
22453
22454	// CmafWriteHLSManifestEnabled is a CmafWriteHLSManifest enum value
22455	CmafWriteHLSManifestEnabled = "ENABLED"
22456)
22457
22458// CmafWriteHLSManifest_Values returns all elements of the CmafWriteHLSManifest enum
22459func CmafWriteHLSManifest_Values() []string {
22460	return []string{
22461		CmafWriteHLSManifestDisabled,
22462		CmafWriteHLSManifestEnabled,
22463	}
22464}
22465
22466// When you enable Precise segment duration in DASH manifests (writeSegmentTimelineInRepresentation),
22467// your DASH manifest shows precise segment durations. The segment duration
22468// information appears inside the SegmentTimeline element, inside SegmentTemplate
22469// at the Representation level. When this feature isn't enabled, the segment
22470// durations in your DASH manifest are approximate. The segment duration information
22471// appears in the duration attribute of the SegmentTemplate element.
22472const (
22473	// CmafWriteSegmentTimelineInRepresentationEnabled is a CmafWriteSegmentTimelineInRepresentation enum value
22474	CmafWriteSegmentTimelineInRepresentationEnabled = "ENABLED"
22475
22476	// CmafWriteSegmentTimelineInRepresentationDisabled is a CmafWriteSegmentTimelineInRepresentation enum value
22477	CmafWriteSegmentTimelineInRepresentationDisabled = "DISABLED"
22478)
22479
22480// CmafWriteSegmentTimelineInRepresentation_Values returns all elements of the CmafWriteSegmentTimelineInRepresentation enum
22481func CmafWriteSegmentTimelineInRepresentation_Values() []string {
22482	return []string{
22483		CmafWriteSegmentTimelineInRepresentationEnabled,
22484		CmafWriteSegmentTimelineInRepresentationDisabled,
22485	}
22486}
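
// Illustrative sketch (not part of the generated API): a CMAF output group that
// combines several of the Cmaf* enums above into one settings struct. It assumes
// the CmafGroupSettings type defined elsewhere in this package; the segment and
// fragment lengths are example values in seconds.
func exampleCmafGroupSettings() *CmafGroupSettings {
	return &CmafGroupSettings{
		SegmentControl:                       aws.String(CmafSegmentControlSegmentedFiles),
		SegmentLength:                        aws.Int64(6),
		FragmentLength:                       aws.Int64(2),
		WriteHlsManifest:                     aws.String(CmafWriteHLSManifestEnabled),
		WriteDashManifest:                    aws.String(CmafWriteDASHManifestEnabled),
		MpdProfile:                           aws.String(CmafMpdProfileMainProfile),
		ManifestDurationFormat:               aws.String(CmafManifestDurationFormatFloatingPoint),
		ManifestCompression:                  aws.String(CmafManifestCompressionNone),
		ClientCache:                          aws.String(CmafClientCacheEnabled),
		CodecSpecification:                   aws.String(CmafCodecSpecificationRfc4281),
		StreamInfResolution:                  aws.String(CmafStreamInfResolutionInclude),
		WriteSegmentTimelineInRepresentation: aws.String(CmafWriteSegmentTimelineInRepresentationEnabled),
	}
}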
22487
22488// Specify this setting only when your output will be consumed by a downstream
22489// repackaging workflow that is sensitive to very small duration differences
22490// between video and audio. For this situation, choose Match video duration
22491// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
22492// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
22493// MediaConvert pads the output audio streams with silence or trims them to
22494// ensure that the total duration of each audio stream is at least as long as
22495// the total duration of the video stream. After padding or trimming, the audio
22496// stream duration is no more than one frame longer than the video stream. MediaConvert
22497// applies audio padding or trimming only to the end of the last segment of
22498// the output. For unsegmented outputs, MediaConvert adds padding only to the
22499// end of the file. When you keep the default value, any minor discrepancies
22500// between audio and video duration will depend on your output audio codec.
22501const (
22502	// CmfcAudioDurationDefaultCodecDuration is a CmfcAudioDuration enum value
22503	CmfcAudioDurationDefaultCodecDuration = "DEFAULT_CODEC_DURATION"
22504
22505	// CmfcAudioDurationMatchVideoDuration is a CmfcAudioDuration enum value
22506	CmfcAudioDurationMatchVideoDuration = "MATCH_VIDEO_DURATION"
22507)
22508
22509// CmfcAudioDuration_Values returns all elements of the CmfcAudioDuration enum
22510func CmfcAudioDuration_Values() []string {
22511	return []string{
22512		CmfcAudioDurationDefaultCodecDuration,
22513		CmfcAudioDurationMatchVideoDuration,
22514	}
22515}
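
// Illustrative sketch (not part of the generated API): choosing Match video
// duration for a CMFC output that feeds a duration-sensitive repackager. The
// CmfcSettings field name AudioDuration is an assumption based on this enum name;
// verify it against the CmfcSettings struct in this package.
//
//    cmfc := &CmfcSettings{
//        AudioDuration: aws.String(CmfcAudioDurationMatchVideoDuration), // assumed field name
//    }
//    _ = cmfc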
22516
22517// Choose Include (INCLUDE) to have MediaConvert generate an HLS child manifest
22518// that lists only the I-frames for this rendition, in addition to your regular
22519// manifest for this rendition. You might use this manifest as part of a workflow
22520// that creates preview functions for your video. MediaConvert adds both the
22521// I-frame only child manifest and the regular child manifest to the parent
22522// manifest. When you don't need the I-frame only child manifest, keep the default
22523// value Exclude (EXCLUDE).
22524const (
22525	// CmfcIFrameOnlyManifestInclude is a CmfcIFrameOnlyManifest enum value
22526	CmfcIFrameOnlyManifestInclude = "INCLUDE"
22527
22528	// CmfcIFrameOnlyManifestExclude is a CmfcIFrameOnlyManifest enum value
22529	CmfcIFrameOnlyManifestExclude = "EXCLUDE"
22530)
22531
22532// CmfcIFrameOnlyManifest_Values returns all elements of the CmfcIFrameOnlyManifest enum
22533func CmfcIFrameOnlyManifest_Values() []string {
22534	return []string{
22535		CmfcIFrameOnlyManifestInclude,
22536		CmfcIFrameOnlyManifestExclude,
22537	}
22538}
22539
22540// Use this setting only when you specify SCTE-35 markers from ESAM. Choose
22541// INSERT to put SCTE-35 markers in this output at the insertion points that
22542// you specify in an ESAM XML document. Provide the document in the setting
22543// SCC XML (sccXml).
22544const (
22545	// CmfcScte35EsamInsert is a CmfcScte35Esam enum value
22546	CmfcScte35EsamInsert = "INSERT"
22547
22548	// CmfcScte35EsamNone is a CmfcScte35Esam enum value
22549	CmfcScte35EsamNone = "NONE"
22550)
22551
22552// CmfcScte35Esam_Values returns all elements of the CmfcScte35Esam enum
22553func CmfcScte35Esam_Values() []string {
22554	return []string{
22555		CmfcScte35EsamInsert,
22556		CmfcScte35EsamNone,
22557	}
22558}
22559
22560// Ignore this setting unless you have SCTE-35 markers in your input video file.
22561// Choose Passthrough (PASSTHROUGH) if you want SCTE-35 markers that appear
22562// in your input to also appear in this output. Choose None (NONE) if you don't
22563// want those SCTE-35 markers in this output.
22564const (
22565	// CmfcScte35SourcePassthrough is a CmfcScte35Source enum value
22566	CmfcScte35SourcePassthrough = "PASSTHROUGH"
22567
22568	// CmfcScte35SourceNone is a CmfcScte35Source enum value
22569	CmfcScte35SourceNone = "NONE"
22570)
22571
22572// CmfcScte35Source_Values returns all elements of the CmfcScte35Source enum
22573func CmfcScte35Source_Values() []string {
22574	return []string{
22575		CmfcScte35SourcePassthrough,
22576		CmfcScte35SourceNone,
22577	}
22578}
22579
22580// Choose Insert (INSERT) for this setting to include color metadata in this
22581// output. Choose Ignore (IGNORE) to exclude color metadata from this output.
22582// If you don't specify a value, the service sets this to Insert by default.
22583const (
22584	// ColorMetadataIgnore is a ColorMetadata enum value
22585	ColorMetadataIgnore = "IGNORE"
22586
22587	// ColorMetadataInsert is a ColorMetadata enum value
22588	ColorMetadataInsert = "INSERT"
22589)
22590
22591// ColorMetadata_Values returns all elements of the ColorMetadata enum
22592func ColorMetadata_Values() []string {
22593	return []string{
22594		ColorMetadataIgnore,
22595		ColorMetadataInsert,
22596	}
22597}
22598
22599// If your input video has accurate color space metadata, or if you don't know
22600// about color space, leave this set to the default value Follow (FOLLOW). The
22601// service will automatically detect your input color space. If your input video
22602// has metadata indicating the wrong color space, specify the accurate color
22603// space here. If your input video is HDR 10 and the SMPTE ST 2086 Mastering
22604// Display Color Volume static metadata isn't present in your video stream,
22605// or if that metadata is present but not accurate, choose Force HDR 10 (FORCE_HDR10)
22606// here and specify correct values in the input HDR 10 metadata (Hdr10Metadata)
22607// settings. For more information about MediaConvert HDR jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr.
22608const (
22609	// ColorSpaceFollow is a ColorSpace enum value
22610	ColorSpaceFollow = "FOLLOW"
22611
22612	// ColorSpaceRec601 is a ColorSpace enum value
22613	ColorSpaceRec601 = "REC_601"
22614
22615	// ColorSpaceRec709 is a ColorSpace enum value
22616	ColorSpaceRec709 = "REC_709"
22617
22618	// ColorSpaceHdr10 is a ColorSpace enum value
22619	ColorSpaceHdr10 = "HDR10"
22620
22621	// ColorSpaceHlg2020 is a ColorSpace enum value
22622	ColorSpaceHlg2020 = "HLG_2020"
22623)
22624
22625// ColorSpace_Values returns all elements of the ColorSpace enum
22626func ColorSpace_Values() []string {
22627	return []string{
22628		ColorSpaceFollow,
22629		ColorSpaceRec601,
22630		ColorSpaceRec709,
22631		ColorSpaceHdr10,
22632		ColorSpaceHlg2020,
22633	}
22634}
22635
22636// Specify the color space you want for this output. The service supports conversion
22637// between HDR formats, between SDR formats, from SDR to HDR, and from HDR to
22638// SDR. SDR to HDR conversion doesn't upgrade the dynamic range. The converted
22639// video has an HDR format, but visually appears the same as an unconverted
22640// output. HDR to SDR conversion uses Elemental tone mapping technology to approximate
22641// the outcome of manually regrading from HDR to SDR.
22642const (
22643	// ColorSpaceConversionNone is a ColorSpaceConversion enum value
22644	ColorSpaceConversionNone = "NONE"
22645
22646	// ColorSpaceConversionForce601 is a ColorSpaceConversion enum value
22647	ColorSpaceConversionForce601 = "FORCE_601"
22648
22649	// ColorSpaceConversionForce709 is a ColorSpaceConversion enum value
22650	ColorSpaceConversionForce709 = "FORCE_709"
22651
22652	// ColorSpaceConversionForceHdr10 is a ColorSpaceConversion enum value
22653	ColorSpaceConversionForceHdr10 = "FORCE_HDR10"
22654
22655	// ColorSpaceConversionForceHlg2020 is a ColorSpaceConversion enum value
22656	ColorSpaceConversionForceHlg2020 = "FORCE_HLG_2020"
22657)
22658
22659// ColorSpaceConversion_Values returns all elements of the ColorSpaceConversion enum
22660func ColorSpaceConversion_Values() []string {
22661	return []string{
22662		ColorSpaceConversionNone,
22663		ColorSpaceConversionForce601,
22664		ColorSpaceConversionForce709,
22665		ColorSpaceConversionForceHdr10,
22666		ColorSpaceConversionForceHlg2020,
22667	}
22668}
22669
// There are two sources for color metadata: the input file and the job input
// settings Color space (ColorSpace) and HDR master display information settings
// (Hdr10Metadata). The Color space usage setting determines which takes precedence.
// Choose Force (FORCE) to use color metadata from the input job settings. If you
// don't specify values for those settings, the service defaults to using metadata
// from your input. Choose Fallback (FALLBACK) to use color metadata from the source
// when it is present. If there's no color metadata in your input file, the service
// defaults to using the values you specify in the input settings.
22678const (
22679	// ColorSpaceUsageForce is a ColorSpaceUsage enum value
22680	ColorSpaceUsageForce = "FORCE"
22681
22682	// ColorSpaceUsageFallback is a ColorSpaceUsage enum value
22683	ColorSpaceUsageFallback = "FALLBACK"
22684)
22685
22686// ColorSpaceUsage_Values returns all elements of the ColorSpaceUsage enum
22687func ColorSpaceUsage_Values() []string {
22688	return []string{
22689		ColorSpaceUsageForce,
22690		ColorSpaceUsageFallback,
22691	}
22692}
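
// Illustrative sketch (not part of the generated API): overriding inaccurate
// input color metadata by pairing Color space with Color space usage on the
// input's video selector. The VideoSelector field names are assumptions; verify
// them against the VideoSelector struct in this package.
//
//    selector := &VideoSelector{
//        ColorSpace:      aws.String(ColorSpaceRec709),     // the color space you trust
//        ColorSpaceUsage: aws.String(ColorSpaceUsageForce), // ignore the input metadata
//    }
//    _ = selector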
22693
22694// The length of the term of your reserved queue pricing plan commitment.
22695const (
22696	// CommitmentOneYear is a Commitment enum value
22697	CommitmentOneYear = "ONE_YEAR"
22698)
22699
22700// Commitment_Values returns all elements of the Commitment enum
22701func Commitment_Values() []string {
22702	return []string{
22703		CommitmentOneYear,
22704	}
22705}
22706
// Container for this output. Some containers require a container settings object.
// If you don't specify one, the service creates a container settings object with
// default values.
22709const (
22710	// ContainerTypeF4v is a ContainerType enum value
22711	ContainerTypeF4v = "F4V"
22712
22713	// ContainerTypeIsmv is a ContainerType enum value
22714	ContainerTypeIsmv = "ISMV"
22715
22716	// ContainerTypeM2ts is a ContainerType enum value
22717	ContainerTypeM2ts = "M2TS"
22718
22719	// ContainerTypeM3u8 is a ContainerType enum value
22720	ContainerTypeM3u8 = "M3U8"
22721
22722	// ContainerTypeCmfc is a ContainerType enum value
22723	ContainerTypeCmfc = "CMFC"
22724
22725	// ContainerTypeMov is a ContainerType enum value
22726	ContainerTypeMov = "MOV"
22727
22728	// ContainerTypeMp4 is a ContainerType enum value
22729	ContainerTypeMp4 = "MP4"
22730
22731	// ContainerTypeMpd is a ContainerType enum value
22732	ContainerTypeMpd = "MPD"
22733
22734	// ContainerTypeMxf is a ContainerType enum value
22735	ContainerTypeMxf = "MXF"
22736
22737	// ContainerTypeWebm is a ContainerType enum value
22738	ContainerTypeWebm = "WEBM"
22739
22740	// ContainerTypeRaw is a ContainerType enum value
22741	ContainerTypeRaw = "RAW"
22742)
22743
22744// ContainerType_Values returns all elements of the ContainerType enum
22745func ContainerType_Values() []string {
22746	return []string{
22747		ContainerTypeF4v,
22748		ContainerTypeIsmv,
22749		ContainerTypeM2ts,
22750		ContainerTypeM3u8,
22751		ContainerTypeCmfc,
22752		ContainerTypeMov,
22753		ContainerTypeMp4,
22754		ContainerTypeMpd,
22755		ContainerTypeMxf,
22756		ContainerTypeWebm,
22757		ContainerTypeRaw,
22758	}
22759}
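
// Illustrative sketch (not part of the generated API): the *_Values helpers,
// such as ContainerType_Values above, can back simple client-side validation
// before you submit a job. The helper name isValidContainerType is hypothetical.
//
//    func isValidContainerType(v string) bool {
//        for _, allowed := range ContainerType_Values() {
//            if v == allowed {
//                return true
//            }
//        }
//        return false
//    }
//
//    // isValidContainerType(ContainerTypeMp4) returns true; isValidContainerType("MKV") returns false.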
22760
// Specify whether the output complies with the HbbTV 1.5 specification.
22762const (
22763	// DashIsoHbbtvComplianceHbbtv15 is a DashIsoHbbtvCompliance enum value
22764	DashIsoHbbtvComplianceHbbtv15 = "HBBTV_1_5"
22765
22766	// DashIsoHbbtvComplianceNone is a DashIsoHbbtvCompliance enum value
22767	DashIsoHbbtvComplianceNone = "NONE"
22768)
22769
22770// DashIsoHbbtvCompliance_Values returns all elements of the DashIsoHbbtvCompliance enum
22771func DashIsoHbbtvCompliance_Values() []string {
22772	return []string{
22773		DashIsoHbbtvComplianceHbbtv15,
22774		DashIsoHbbtvComplianceNone,
22775	}
22776}
22777
22778// Specify whether your DASH profile is on-demand or main. When you choose Main
22779// profile (MAIN_PROFILE), the service signals urn:mpeg:dash:profile:isoff-main:2011
22780// in your .mpd DASH manifest. When you choose On-demand (ON_DEMAND_PROFILE),
22781// the service signals urn:mpeg:dash:profile:isoff-on-demand:2011 in your .mpd.
22782// When you choose On-demand, you must also set the output group setting Segment
22783// control (SegmentControl) to Single file (SINGLE_FILE).
22784const (
22785	// DashIsoMpdProfileMainProfile is a DashIsoMpdProfile enum value
22786	DashIsoMpdProfileMainProfile = "MAIN_PROFILE"
22787
22788	// DashIsoMpdProfileOnDemandProfile is a DashIsoMpdProfile enum value
22789	DashIsoMpdProfileOnDemandProfile = "ON_DEMAND_PROFILE"
22790)
22791
22792// DashIsoMpdProfile_Values returns all elements of the DashIsoMpdProfile enum
22793func DashIsoMpdProfile_Values() []string {
22794	return []string{
22795		DashIsoMpdProfileMainProfile,
22796		DashIsoMpdProfileOnDemandProfile,
22797	}
22798}
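
// Illustrative sketch (not part of the generated API): per the note above, the
// On-demand profile must be paired with single-file segment control. The
// DashIsoGroupSettings field names are assumptions; verify them against the
// DashIsoGroupSettings struct in this package.
//
//    dashGroup := &DashIsoGroupSettings{
//        MpdProfile:     aws.String(DashIsoMpdProfileOnDemandProfile),
//        SegmentControl: aws.String(DashIsoSegmentControlSingleFile),
//    }
//    _ = dashGroup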
22799
22800// This setting can improve the compatibility of your output with video players
22801// on obsolete devices. It applies only to DASH H.264 outputs with DRM encryption.
22802// Choose Unencrypted SEI (UNENCRYPTED_SEI) only to correct problems with playback
22803// on older devices. Otherwise, keep the default setting CENC v1 (CENC_V1).
22804// If you choose Unencrypted SEI, for that output, the service will exclude
22805// the access unit delimiter and will leave the SEI NAL units unencrypted.
22806const (
22807	// DashIsoPlaybackDeviceCompatibilityCencV1 is a DashIsoPlaybackDeviceCompatibility enum value
22808	DashIsoPlaybackDeviceCompatibilityCencV1 = "CENC_V1"
22809
22810	// DashIsoPlaybackDeviceCompatibilityUnencryptedSei is a DashIsoPlaybackDeviceCompatibility enum value
22811	DashIsoPlaybackDeviceCompatibilityUnencryptedSei = "UNENCRYPTED_SEI"
22812)
22813
22814// DashIsoPlaybackDeviceCompatibility_Values returns all elements of the DashIsoPlaybackDeviceCompatibility enum
22815func DashIsoPlaybackDeviceCompatibility_Values() []string {
22816	return []string{
22817		DashIsoPlaybackDeviceCompatibilityCencV1,
22818		DashIsoPlaybackDeviceCompatibilityUnencryptedSei,
22819	}
22820}
22821
22822// When set to SINGLE_FILE, a single output file is generated, which is internally
22823// segmented using the Fragment Length and Segment Length. When set to SEGMENTED_FILES,
22824// separate segment files will be created.
22825const (
22826	// DashIsoSegmentControlSingleFile is a DashIsoSegmentControl enum value
22827	DashIsoSegmentControlSingleFile = "SINGLE_FILE"
22828
22829	// DashIsoSegmentControlSegmentedFiles is a DashIsoSegmentControl enum value
22830	DashIsoSegmentControlSegmentedFiles = "SEGMENTED_FILES"
22831)
22832
22833// DashIsoSegmentControl_Values returns all elements of the DashIsoSegmentControl enum
22834func DashIsoSegmentControl_Values() []string {
22835	return []string{
22836		DashIsoSegmentControlSingleFile,
22837		DashIsoSegmentControlSegmentedFiles,
22838	}
22839}
22840
22841// When you enable Precise segment duration in manifests (writeSegmentTimelineInRepresentation),
22842// your DASH manifest shows precise segment durations. The segment duration
22843// information appears inside the SegmentTimeline element, inside SegmentTemplate
22844// at the Representation level. When this feature isn't enabled, the segment
22845// durations in your DASH manifest are approximate. The segment duration information
22846// appears in the duration attribute of the SegmentTemplate element.
22847const (
22848	// DashIsoWriteSegmentTimelineInRepresentationEnabled is a DashIsoWriteSegmentTimelineInRepresentation enum value
22849	DashIsoWriteSegmentTimelineInRepresentationEnabled = "ENABLED"
22850
22851	// DashIsoWriteSegmentTimelineInRepresentationDisabled is a DashIsoWriteSegmentTimelineInRepresentation enum value
22852	DashIsoWriteSegmentTimelineInRepresentationDisabled = "DISABLED"
22853)
22854
22855// DashIsoWriteSegmentTimelineInRepresentation_Values returns all elements of the DashIsoWriteSegmentTimelineInRepresentation enum
22856func DashIsoWriteSegmentTimelineInRepresentation_Values() []string {
22857	return []string{
22858		DashIsoWriteSegmentTimelineInRepresentationEnabled,
22859		DashIsoWriteSegmentTimelineInRepresentationDisabled,
22860	}
22861}
22862
22863// Specify the encryption mode that you used to encrypt your input files.
22864const (
22865	// DecryptionModeAesCtr is a DecryptionMode enum value
22866	DecryptionModeAesCtr = "AES_CTR"
22867
22868	// DecryptionModeAesCbc is a DecryptionMode enum value
22869	DecryptionModeAesCbc = "AES_CBC"
22870
22871	// DecryptionModeAesGcm is a DecryptionMode enum value
22872	DecryptionModeAesGcm = "AES_GCM"
22873)
22874
22875// DecryptionMode_Values returns all elements of the DecryptionMode enum
22876func DecryptionMode_Values() []string {
22877	return []string{
22878		DecryptionModeAesCtr,
22879		DecryptionModeAesCbc,
22880		DecryptionModeAesGcm,
22881	}
22882}
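
// Illustrative sketch (not part of the generated API): declaring how an encrypted
// input was encrypted so the service can decrypt it. The InputDecryptionSettings
// field name DecryptionMode is an assumption based on this enum name; verify it
// against the InputDecryptionSettings struct in this package.
//
//    decryption := &InputDecryptionSettings{
//        DecryptionMode: aws.String(DecryptionModeAesCbc), // assumed field name
//    }
//    _ = decryption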
22883
22884// Only applies when you set Deinterlacer (DeinterlaceMode) to Deinterlace (DEINTERLACE)
22885// or Adaptive (ADAPTIVE). Motion adaptive interpolate (INTERPOLATE) produces
// sharper pictures, while blend (BLEND) produces smoother motion. Use Interpolate
// ticker (INTERPOLATE_TICKER) or Blend ticker (BLEND_TICKER) if your source file
// includes a ticker, such as a scrolling headline at the bottom of the frame.
22889const (
22890	// DeinterlaceAlgorithmInterpolate is a DeinterlaceAlgorithm enum value
22891	DeinterlaceAlgorithmInterpolate = "INTERPOLATE"
22892
22893	// DeinterlaceAlgorithmInterpolateTicker is a DeinterlaceAlgorithm enum value
22894	DeinterlaceAlgorithmInterpolateTicker = "INTERPOLATE_TICKER"
22895
22896	// DeinterlaceAlgorithmBlend is a DeinterlaceAlgorithm enum value
22897	DeinterlaceAlgorithmBlend = "BLEND"
22898
22899	// DeinterlaceAlgorithmBlendTicker is a DeinterlaceAlgorithm enum value
22900	DeinterlaceAlgorithmBlendTicker = "BLEND_TICKER"
22901)
22902
22903// DeinterlaceAlgorithm_Values returns all elements of the DeinterlaceAlgorithm enum
22904func DeinterlaceAlgorithm_Values() []string {
22905	return []string{
22906		DeinterlaceAlgorithmInterpolate,
22907		DeinterlaceAlgorithmInterpolateTicker,
22908		DeinterlaceAlgorithmBlend,
22909		DeinterlaceAlgorithmBlendTicker,
22910	}
22911}
22912
22913// - When set to NORMAL (default), the deinterlacer does not convert frames
22914// that are tagged in metadata as progressive. It will only convert those that
22915// are tagged as some other type. - When set to FORCE_ALL_FRAMES, the deinterlacer
22916// converts every frame to progressive - even those that are already tagged
22917// as progressive. Turn Force mode on only if there is a good chance that the
22918// metadata has tagged frames as progressive when they are not progressive.
// Otherwise, leave it off; deinterlacing frames that are already progressive
// will probably result in lower-quality video.
22921const (
22922	// DeinterlacerControlForceAllFrames is a DeinterlacerControl enum value
22923	DeinterlacerControlForceAllFrames = "FORCE_ALL_FRAMES"
22924
22925	// DeinterlacerControlNormal is a DeinterlacerControl enum value
22926	DeinterlacerControlNormal = "NORMAL"
22927)
22928
22929// DeinterlacerControl_Values returns all elements of the DeinterlacerControl enum
22930func DeinterlacerControl_Values() []string {
22931	return []string{
22932		DeinterlacerControlForceAllFrames,
22933		DeinterlacerControlNormal,
22934	}
22935}
22936
22937// Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing.
22938// Default is Deinterlace. - Deinterlace converts interlaced to progressive.
22939// - Inverse telecine converts Hard Telecine 29.97i to progressive 23.976p.
22940// - Adaptive auto-detects and converts to progressive.
22941const (
22942	// DeinterlacerModeDeinterlace is a DeinterlacerMode enum value
22943	DeinterlacerModeDeinterlace = "DEINTERLACE"
22944
22945	// DeinterlacerModeInverseTelecine is a DeinterlacerMode enum value
22946	DeinterlacerModeInverseTelecine = "INVERSE_TELECINE"
22947
22948	// DeinterlacerModeAdaptive is a DeinterlacerMode enum value
22949	DeinterlacerModeAdaptive = "ADAPTIVE"
22950)
22951
22952// DeinterlacerMode_Values returns all elements of the DeinterlacerMode enum
22953func DeinterlacerMode_Values() []string {
22954	return []string{
22955		DeinterlacerModeDeinterlace,
22956		DeinterlacerModeInverseTelecine,
22957		DeinterlacerModeAdaptive,
22958	}
22959}
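
// Illustrative sketch (not part of the generated API): the deinterlacer enums
// above (algorithm, control, and mode) configure a single Deinterlacer
// preprocessor. The Deinterlacer field names are assumptions based on these enum
// names; verify them against the Deinterlacer struct in this package.
//
//    deinterlacer := &Deinterlacer{
//        Mode:      aws.String(DeinterlacerModeAdaptive),
//        Algorithm: aws.String(DeinterlaceAlgorithmInterpolate),
//        Control:   aws.String(DeinterlacerControlNormal),
//    }
//    _ = deinterlacer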
22960
22961// Optional field, defaults to DEFAULT. Specify DEFAULT for this operation to
22962// return your endpoints if any exist, or to create an endpoint for you and
22963// return it if one doesn't already exist. Specify GET_ONLY to return your endpoints
22964// if any exist, or an empty list if none exist.
22965const (
22966	// DescribeEndpointsModeDefault is a DescribeEndpointsMode enum value
22967	DescribeEndpointsModeDefault = "DEFAULT"
22968
22969	// DescribeEndpointsModeGetOnly is a DescribeEndpointsMode enum value
22970	DescribeEndpointsModeGetOnly = "GET_ONLY"
22971)
22972
22973// DescribeEndpointsMode_Values returns all elements of the DescribeEndpointsMode enum
22974func DescribeEndpointsMode_Values() []string {
22975	return []string{
22976		DescribeEndpointsModeDefault,
22977		DescribeEndpointsModeGetOnly,
22978	}
22979}
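
// Illustrative sketch (not part of the generated API documentation): listing any
// existing account endpoints without creating one by using GET_ONLY. Assumes svc
// is a configured *MediaConvert client; the DescribeEndpointsInput field name
// Mode is an assumption based on this enum name.
//
//    resp, err := svc.DescribeEndpoints(&DescribeEndpointsInput{
//        Mode: aws.String(DescribeEndpointsModeGetOnly),
//    })
//    if err == nil {
//        fmt.Println(resp) // endpoints, possibly an empty list
//    }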
22980
22981// Use Dolby Vision Mode to choose how the service will handle Dolby Vision
// MaxCLL and MaxFALL properties.
22983const (
22984	// DolbyVisionLevel6ModePassthrough is a DolbyVisionLevel6Mode enum value
22985	DolbyVisionLevel6ModePassthrough = "PASSTHROUGH"
22986
22987	// DolbyVisionLevel6ModeRecalculate is a DolbyVisionLevel6Mode enum value
22988	DolbyVisionLevel6ModeRecalculate = "RECALCULATE"
22989
22990	// DolbyVisionLevel6ModeSpecify is a DolbyVisionLevel6Mode enum value
22991	DolbyVisionLevel6ModeSpecify = "SPECIFY"
22992)
22993
22994// DolbyVisionLevel6Mode_Values returns all elements of the DolbyVisionLevel6Mode enum
22995func DolbyVisionLevel6Mode_Values() []string {
22996	return []string{
22997		DolbyVisionLevel6ModePassthrough,
22998		DolbyVisionLevel6ModeRecalculate,
22999		DolbyVisionLevel6ModeSpecify,
23000	}
23001}
23002
23003// In the current MediaConvert implementation, the Dolby Vision profile is always
23004// 5 (PROFILE_5). Therefore, all of your inputs must contain Dolby Vision frame
23005// interleaved data.
23006const (
23007	// DolbyVisionProfileProfile5 is a DolbyVisionProfile enum value
23008	DolbyVisionProfileProfile5 = "PROFILE_5"
23009)
23010
23011// DolbyVisionProfile_Values returns all elements of the DolbyVisionProfile enum
23012func DolbyVisionProfile_Values() []string {
23013	return []string{
23014		DolbyVisionProfileProfile5,
23015	}
23016}
23017
23018// Applies only to 29.97 fps outputs. When this feature is enabled, the service
23019// will use drop-frame timecode on outputs. If it is not possible to use drop-frame
23020// timecode, the system will fall back to non-drop-frame. This setting is enabled
23021// by default when Timecode insertion (TimecodeInsertion) is enabled.
23022const (
23023	// DropFrameTimecodeDisabled is a DropFrameTimecode enum value
23024	DropFrameTimecodeDisabled = "DISABLED"
23025
23026	// DropFrameTimecodeEnabled is a DropFrameTimecode enum value
23027	DropFrameTimecodeEnabled = "ENABLED"
23028)
23029
23030// DropFrameTimecode_Values returns all elements of the DropFrameTimecode enum
23031func DropFrameTimecode_Values() []string {
23032	return []string{
23033		DropFrameTimecodeDisabled,
23034		DropFrameTimecodeEnabled,
23035	}
23036}
23037
23038// If no explicit x_position or y_position is provided, setting alignment to
23039// centered will place the captions at the bottom center of the output. Similarly,
23040// setting a left alignment will align captions to the bottom left of the output.
23041// If x and y positions are given in conjunction with the alignment parameter,
23042// the font will be justified (either left or centered) relative to those coordinates.
23043// This option is not valid for source captions that are STL, 608/embedded or
23044// teletext. These source settings are already pre-defined by the caption stream.
23045// All burn-in and DVB-Sub font settings must match.
23046const (
23047	// DvbSubtitleAlignmentCentered is a DvbSubtitleAlignment enum value
23048	DvbSubtitleAlignmentCentered = "CENTERED"
23049
23050	// DvbSubtitleAlignmentLeft is a DvbSubtitleAlignment enum value
23051	DvbSubtitleAlignmentLeft = "LEFT"
23052)
23053
23054// DvbSubtitleAlignment_Values returns all elements of the DvbSubtitleAlignment enum
23055func DvbSubtitleAlignment_Values() []string {
23056	return []string{
23057		DvbSubtitleAlignmentCentered,
23058		DvbSubtitleAlignmentLeft,
23059	}
23060}
23061
// Specifies the color of the rectangle behind the captions. All burn-in and
// DVB-Sub font settings must match.
23064const (
23065	// DvbSubtitleBackgroundColorNone is a DvbSubtitleBackgroundColor enum value
23066	DvbSubtitleBackgroundColorNone = "NONE"
23067
23068	// DvbSubtitleBackgroundColorBlack is a DvbSubtitleBackgroundColor enum value
23069	DvbSubtitleBackgroundColorBlack = "BLACK"
23070
23071	// DvbSubtitleBackgroundColorWhite is a DvbSubtitleBackgroundColor enum value
23072	DvbSubtitleBackgroundColorWhite = "WHITE"
23073)
23074
23075// DvbSubtitleBackgroundColor_Values returns all elements of the DvbSubtitleBackgroundColor enum
23076func DvbSubtitleBackgroundColor_Values() []string {
23077	return []string{
23078		DvbSubtitleBackgroundColorNone,
23079		DvbSubtitleBackgroundColorBlack,
23080		DvbSubtitleBackgroundColorWhite,
23081	}
23082}
23083
23084// Specifies the color of the burned-in captions. This option is not valid for
23085// source captions that are STL, 608/embedded or teletext. These source settings
23086// are already pre-defined by the caption stream. All burn-in and DVB-Sub font
23087// settings must match.
23088const (
23089	// DvbSubtitleFontColorWhite is a DvbSubtitleFontColor enum value
23090	DvbSubtitleFontColorWhite = "WHITE"
23091
23092	// DvbSubtitleFontColorBlack is a DvbSubtitleFontColor enum value
23093	DvbSubtitleFontColorBlack = "BLACK"
23094
23095	// DvbSubtitleFontColorYellow is a DvbSubtitleFontColor enum value
23096	DvbSubtitleFontColorYellow = "YELLOW"
23097
23098	// DvbSubtitleFontColorRed is a DvbSubtitleFontColor enum value
23099	DvbSubtitleFontColorRed = "RED"
23100
23101	// DvbSubtitleFontColorGreen is a DvbSubtitleFontColor enum value
23102	DvbSubtitleFontColorGreen = "GREEN"
23103
23104	// DvbSubtitleFontColorBlue is a DvbSubtitleFontColor enum value
23105	DvbSubtitleFontColorBlue = "BLUE"
23106)
23107
23108// DvbSubtitleFontColor_Values returns all elements of the DvbSubtitleFontColor enum
23109func DvbSubtitleFontColor_Values() []string {
23110	return []string{
23111		DvbSubtitleFontColorWhite,
23112		DvbSubtitleFontColorBlack,
23113		DvbSubtitleFontColorYellow,
23114		DvbSubtitleFontColorRed,
23115		DvbSubtitleFontColorGreen,
23116		DvbSubtitleFontColorBlue,
23117	}
23118}
23119
23120// Specifies font outline color. This option is not valid for source captions
23121// that are either 608/embedded or teletext. These source settings are already
23122// pre-defined by the caption stream. All burn-in and DVB-Sub font settings
23123// must match.
23124const (
23125	// DvbSubtitleOutlineColorBlack is a DvbSubtitleOutlineColor enum value
23126	DvbSubtitleOutlineColorBlack = "BLACK"
23127
23128	// DvbSubtitleOutlineColorWhite is a DvbSubtitleOutlineColor enum value
23129	DvbSubtitleOutlineColorWhite = "WHITE"
23130
23131	// DvbSubtitleOutlineColorYellow is a DvbSubtitleOutlineColor enum value
23132	DvbSubtitleOutlineColorYellow = "YELLOW"
23133
23134	// DvbSubtitleOutlineColorRed is a DvbSubtitleOutlineColor enum value
23135	DvbSubtitleOutlineColorRed = "RED"
23136
23137	// DvbSubtitleOutlineColorGreen is a DvbSubtitleOutlineColor enum value
23138	DvbSubtitleOutlineColorGreen = "GREEN"
23139
23140	// DvbSubtitleOutlineColorBlue is a DvbSubtitleOutlineColor enum value
23141	DvbSubtitleOutlineColorBlue = "BLUE"
23142)
23143
23144// DvbSubtitleOutlineColor_Values returns all elements of the DvbSubtitleOutlineColor enum
23145func DvbSubtitleOutlineColor_Values() []string {
23146	return []string{
23147		DvbSubtitleOutlineColorBlack,
23148		DvbSubtitleOutlineColorWhite,
23149		DvbSubtitleOutlineColorYellow,
23150		DvbSubtitleOutlineColorRed,
23151		DvbSubtitleOutlineColorGreen,
23152		DvbSubtitleOutlineColorBlue,
23153	}
23154}
23155
// Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub
// font settings must match.
23158const (
23159	// DvbSubtitleShadowColorNone is a DvbSubtitleShadowColor enum value
23160	DvbSubtitleShadowColorNone = "NONE"
23161
23162	// DvbSubtitleShadowColorBlack is a DvbSubtitleShadowColor enum value
23163	DvbSubtitleShadowColorBlack = "BLACK"
23164
23165	// DvbSubtitleShadowColorWhite is a DvbSubtitleShadowColor enum value
23166	DvbSubtitleShadowColorWhite = "WHITE"
23167)
23168
23169// DvbSubtitleShadowColor_Values returns all elements of the DvbSubtitleShadowColor enum
23170func DvbSubtitleShadowColor_Values() []string {
23171	return []string{
23172		DvbSubtitleShadowColorNone,
23173		DvbSubtitleShadowColorBlack,
23174		DvbSubtitleShadowColorWhite,
23175	}
23176}
23177
23178// Only applies to jobs with input captions in Teletext or STL formats. Specify
23179// whether the spacing between letters in your captions is set by the captions
23180// grid or varies depending on letter width. Choose fixed grid to conform to
23181// the spacing specified in the captions file more accurately. Choose proportional
// to make the text easier to read if the captions are closed captions.
23183const (
23184	// DvbSubtitleTeletextSpacingFixedGrid is a DvbSubtitleTeletextSpacing enum value
23185	DvbSubtitleTeletextSpacingFixedGrid = "FIXED_GRID"
23186
23187	// DvbSubtitleTeletextSpacingProportional is a DvbSubtitleTeletextSpacing enum value
23188	DvbSubtitleTeletextSpacingProportional = "PROPORTIONAL"
23189)
23190
23191// DvbSubtitleTeletextSpacing_Values returns all elements of the DvbSubtitleTeletextSpacing enum
23192func DvbSubtitleTeletextSpacing_Values() []string {
23193	return []string{
23194		DvbSubtitleTeletextSpacingFixedGrid,
23195		DvbSubtitleTeletextSpacingProportional,
23196	}
23197}
23198
// Specify whether your DVB subtitles are standard or for the hearing impaired.
23200// Choose hearing impaired if your subtitles include audio descriptions and
23201// dialogue. Choose standard if your subtitles include only dialogue.
23202const (
23203	// DvbSubtitlingTypeHearingImpaired is a DvbSubtitlingType enum value
23204	DvbSubtitlingTypeHearingImpaired = "HEARING_IMPAIRED"
23205
23206	// DvbSubtitlingTypeStandard is a DvbSubtitlingType enum value
23207	DvbSubtitlingTypeStandard = "STANDARD"
23208)
23209
23210// DvbSubtitlingType_Values returns all elements of the DvbSubtitlingType enum
23211func DvbSubtitlingType_Values() []string {
23212	return []string{
23213		DvbSubtitlingTypeHearingImpaired,
23214		DvbSubtitlingTypeStandard,
23215	}
23216}
23217
23218// Specify the bitstream mode for the E-AC-3 stream that the encoder emits.
23219// For more information about the EAC3 bitstream mode, see ATSC A/52-2012 (Annex
23220// E).
23221const (
23222	// Eac3AtmosBitstreamModeCompleteMain is a Eac3AtmosBitstreamMode enum value
23223	Eac3AtmosBitstreamModeCompleteMain = "COMPLETE_MAIN"
23224)
23225
23226// Eac3AtmosBitstreamMode_Values returns all elements of the Eac3AtmosBitstreamMode enum
23227func Eac3AtmosBitstreamMode_Values() []string {
23228	return []string{
23229		Eac3AtmosBitstreamModeCompleteMain,
23230	}
23231}
23232
23233// The coding mode for Dolby Digital Plus JOC (Atmos) is always 9.1.6 (CODING_MODE_9_1_6).
23234const (
23235	// Eac3AtmosCodingModeCodingMode916 is a Eac3AtmosCodingMode enum value
23236	Eac3AtmosCodingModeCodingMode916 = "CODING_MODE_9_1_6"
23237)
23238
23239// Eac3AtmosCodingMode_Values returns all elements of the Eac3AtmosCodingMode enum
23240func Eac3AtmosCodingMode_Values() []string {
23241	return []string{
23242		Eac3AtmosCodingModeCodingMode916,
23243	}
23244}
23245
23246// Enable Dolby Dialogue Intelligence to adjust loudness based on dialogue analysis.
23247const (
23248	// Eac3AtmosDialogueIntelligenceEnabled is a Eac3AtmosDialogueIntelligence enum value
23249	Eac3AtmosDialogueIntelligenceEnabled = "ENABLED"
23250
23251	// Eac3AtmosDialogueIntelligenceDisabled is a Eac3AtmosDialogueIntelligence enum value
23252	Eac3AtmosDialogueIntelligenceDisabled = "DISABLED"
23253)
23254
23255// Eac3AtmosDialogueIntelligence_Values returns all elements of the Eac3AtmosDialogueIntelligence enum
23256func Eac3AtmosDialogueIntelligence_Values() []string {
23257	return []string{
23258		Eac3AtmosDialogueIntelligenceEnabled,
23259		Eac3AtmosDialogueIntelligenceDisabled,
23260	}
23261}
23262
23263// Specify the absolute peak level for a signal with dynamic range compression.
23264const (
23265	// Eac3AtmosDynamicRangeCompressionLineNone is a Eac3AtmosDynamicRangeCompressionLine enum value
23266	Eac3AtmosDynamicRangeCompressionLineNone = "NONE"
23267
23268	// Eac3AtmosDynamicRangeCompressionLineFilmStandard is a Eac3AtmosDynamicRangeCompressionLine enum value
23269	Eac3AtmosDynamicRangeCompressionLineFilmStandard = "FILM_STANDARD"
23270
23271	// Eac3AtmosDynamicRangeCompressionLineFilmLight is a Eac3AtmosDynamicRangeCompressionLine enum value
23272	Eac3AtmosDynamicRangeCompressionLineFilmLight = "FILM_LIGHT"
23273
23274	// Eac3AtmosDynamicRangeCompressionLineMusicStandard is a Eac3AtmosDynamicRangeCompressionLine enum value
23275	Eac3AtmosDynamicRangeCompressionLineMusicStandard = "MUSIC_STANDARD"
23276
23277	// Eac3AtmosDynamicRangeCompressionLineMusicLight is a Eac3AtmosDynamicRangeCompressionLine enum value
23278	Eac3AtmosDynamicRangeCompressionLineMusicLight = "MUSIC_LIGHT"
23279
23280	// Eac3AtmosDynamicRangeCompressionLineSpeech is a Eac3AtmosDynamicRangeCompressionLine enum value
23281	Eac3AtmosDynamicRangeCompressionLineSpeech = "SPEECH"
23282)
23283
23284// Eac3AtmosDynamicRangeCompressionLine_Values returns all elements of the Eac3AtmosDynamicRangeCompressionLine enum
23285func Eac3AtmosDynamicRangeCompressionLine_Values() []string {
23286	return []string{
23287		Eac3AtmosDynamicRangeCompressionLineNone,
23288		Eac3AtmosDynamicRangeCompressionLineFilmStandard,
23289		Eac3AtmosDynamicRangeCompressionLineFilmLight,
23290		Eac3AtmosDynamicRangeCompressionLineMusicStandard,
23291		Eac3AtmosDynamicRangeCompressionLineMusicLight,
23292		Eac3AtmosDynamicRangeCompressionLineSpeech,
23293	}
23294}
23295
23296// Specify how the service limits the audio dynamic range when compressing the
23297// audio.
23298const (
23299	// Eac3AtmosDynamicRangeCompressionRfNone is a Eac3AtmosDynamicRangeCompressionRf enum value
23300	Eac3AtmosDynamicRangeCompressionRfNone = "NONE"
23301
23302	// Eac3AtmosDynamicRangeCompressionRfFilmStandard is a Eac3AtmosDynamicRangeCompressionRf enum value
23303	Eac3AtmosDynamicRangeCompressionRfFilmStandard = "FILM_STANDARD"
23304
23305	// Eac3AtmosDynamicRangeCompressionRfFilmLight is a Eac3AtmosDynamicRangeCompressionRf enum value
23306	Eac3AtmosDynamicRangeCompressionRfFilmLight = "FILM_LIGHT"
23307
23308	// Eac3AtmosDynamicRangeCompressionRfMusicStandard is a Eac3AtmosDynamicRangeCompressionRf enum value
23309	Eac3AtmosDynamicRangeCompressionRfMusicStandard = "MUSIC_STANDARD"
23310
23311	// Eac3AtmosDynamicRangeCompressionRfMusicLight is a Eac3AtmosDynamicRangeCompressionRf enum value
23312	Eac3AtmosDynamicRangeCompressionRfMusicLight = "MUSIC_LIGHT"
23313
23314	// Eac3AtmosDynamicRangeCompressionRfSpeech is a Eac3AtmosDynamicRangeCompressionRf enum value
23315	Eac3AtmosDynamicRangeCompressionRfSpeech = "SPEECH"
23316)
23317
23318// Eac3AtmosDynamicRangeCompressionRf_Values returns all elements of the Eac3AtmosDynamicRangeCompressionRf enum
23319func Eac3AtmosDynamicRangeCompressionRf_Values() []string {
23320	return []string{
23321		Eac3AtmosDynamicRangeCompressionRfNone,
23322		Eac3AtmosDynamicRangeCompressionRfFilmStandard,
23323		Eac3AtmosDynamicRangeCompressionRfFilmLight,
23324		Eac3AtmosDynamicRangeCompressionRfMusicStandard,
23325		Eac3AtmosDynamicRangeCompressionRfMusicLight,
23326		Eac3AtmosDynamicRangeCompressionRfSpeech,
23327	}
23328}
23329
23330// Choose how the service meters the loudness of your audio.
23331const (
23332	// Eac3AtmosMeteringModeLeqA is a Eac3AtmosMeteringMode enum value
23333	Eac3AtmosMeteringModeLeqA = "LEQ_A"
23334
23335	// Eac3AtmosMeteringModeItuBs17701 is a Eac3AtmosMeteringMode enum value
23336	Eac3AtmosMeteringModeItuBs17701 = "ITU_BS_1770_1"
23337
23338	// Eac3AtmosMeteringModeItuBs17702 is a Eac3AtmosMeteringMode enum value
23339	Eac3AtmosMeteringModeItuBs17702 = "ITU_BS_1770_2"
23340
23341	// Eac3AtmosMeteringModeItuBs17703 is a Eac3AtmosMeteringMode enum value
23342	Eac3AtmosMeteringModeItuBs17703 = "ITU_BS_1770_3"
23343
23344	// Eac3AtmosMeteringModeItuBs17704 is a Eac3AtmosMeteringMode enum value
23345	Eac3AtmosMeteringModeItuBs17704 = "ITU_BS_1770_4"
23346)
23347
23348// Eac3AtmosMeteringMode_Values returns all elements of the Eac3AtmosMeteringMode enum
23349func Eac3AtmosMeteringMode_Values() []string {
23350	return []string{
23351		Eac3AtmosMeteringModeLeqA,
23352		Eac3AtmosMeteringModeItuBs17701,
23353		Eac3AtmosMeteringModeItuBs17702,
23354		Eac3AtmosMeteringModeItuBs17703,
23355		Eac3AtmosMeteringModeItuBs17704,
23356	}
23357}
23358
23359// Choose how the service does stereo downmixing.
23360const (
23361	// Eac3AtmosStereoDownmixNotIndicated is a Eac3AtmosStereoDownmix enum value
23362	Eac3AtmosStereoDownmixNotIndicated = "NOT_INDICATED"
23363
23364	// Eac3AtmosStereoDownmixStereo is a Eac3AtmosStereoDownmix enum value
23365	Eac3AtmosStereoDownmixStereo = "STEREO"
23366
23367	// Eac3AtmosStereoDownmixSurround is a Eac3AtmosStereoDownmix enum value
23368	Eac3AtmosStereoDownmixSurround = "SURROUND"
23369
23370	// Eac3AtmosStereoDownmixDpl2 is a Eac3AtmosStereoDownmix enum value
23371	Eac3AtmosStereoDownmixDpl2 = "DPL2"
23372)
23373
23374// Eac3AtmosStereoDownmix_Values returns all elements of the Eac3AtmosStereoDownmix enum
23375func Eac3AtmosStereoDownmix_Values() []string {
23376	return []string{
23377		Eac3AtmosStereoDownmixNotIndicated,
23378		Eac3AtmosStereoDownmixStereo,
23379		Eac3AtmosStereoDownmixSurround,
23380		Eac3AtmosStereoDownmixDpl2,
23381	}
23382}
23383
23384// Specify whether your input audio has an additional center rear surround channel
23385// matrix encoded into your left and right surround channels.
23386const (
23387	// Eac3AtmosSurroundExModeNotIndicated is a Eac3AtmosSurroundExMode enum value
23388	Eac3AtmosSurroundExModeNotIndicated = "NOT_INDICATED"
23389
23390	// Eac3AtmosSurroundExModeEnabled is a Eac3AtmosSurroundExMode enum value
23391	Eac3AtmosSurroundExModeEnabled = "ENABLED"
23392
23393	// Eac3AtmosSurroundExModeDisabled is a Eac3AtmosSurroundExMode enum value
23394	Eac3AtmosSurroundExModeDisabled = "DISABLED"
23395)
23396
23397// Eac3AtmosSurroundExMode_Values returns all elements of the Eac3AtmosSurroundExMode enum
23398func Eac3AtmosSurroundExMode_Values() []string {
23399	return []string{
23400		Eac3AtmosSurroundExModeNotIndicated,
23401		Eac3AtmosSurroundExModeEnabled,
23402		Eac3AtmosSurroundExModeDisabled,
23403	}
23404}
23405
23406// If set to ATTENUATE_3_DB, applies a 3 dB attenuation to the surround channels.
23407// Only used for 3/2 coding mode.
23408const (
23409	// Eac3AttenuationControlAttenuate3Db is a Eac3AttenuationControl enum value
23410	Eac3AttenuationControlAttenuate3Db = "ATTENUATE_3_DB"
23411
23412	// Eac3AttenuationControlNone is a Eac3AttenuationControl enum value
23413	Eac3AttenuationControlNone = "NONE"
23414)
23415
23416// Eac3AttenuationControl_Values returns all elements of the Eac3AttenuationControl enum
23417func Eac3AttenuationControl_Values() []string {
23418	return []string{
23419		Eac3AttenuationControlAttenuate3Db,
23420		Eac3AttenuationControlNone,
23421	}
23422}
23423
23424// Specify the bitstream mode for the E-AC-3 stream that the encoder emits.
23425// For more information about the EAC3 bitstream mode, see ATSC A/52-2012 (Annex
23426// E).
23427const (
23428	// Eac3BitstreamModeCompleteMain is a Eac3BitstreamMode enum value
23429	Eac3BitstreamModeCompleteMain = "COMPLETE_MAIN"
23430
23431	// Eac3BitstreamModeCommentary is a Eac3BitstreamMode enum value
23432	Eac3BitstreamModeCommentary = "COMMENTARY"
23433
23434	// Eac3BitstreamModeEmergency is a Eac3BitstreamMode enum value
23435	Eac3BitstreamModeEmergency = "EMERGENCY"
23436
23437	// Eac3BitstreamModeHearingImpaired is a Eac3BitstreamMode enum value
23438	Eac3BitstreamModeHearingImpaired = "HEARING_IMPAIRED"
23439
23440	// Eac3BitstreamModeVisuallyImpaired is a Eac3BitstreamMode enum value
23441	Eac3BitstreamModeVisuallyImpaired = "VISUALLY_IMPAIRED"
23442)
23443
23444// Eac3BitstreamMode_Values returns all elements of the Eac3BitstreamMode enum
23445func Eac3BitstreamMode_Values() []string {
23446	return []string{
23447		Eac3BitstreamModeCompleteMain,
23448		Eac3BitstreamModeCommentary,
23449		Eac3BitstreamModeEmergency,
23450		Eac3BitstreamModeHearingImpaired,
23451		Eac3BitstreamModeVisuallyImpaired,
23452	}
23453}
23454
23455// Dolby Digital Plus coding mode. Determines number of channels.
23456const (
23457	// Eac3CodingModeCodingMode10 is a Eac3CodingMode enum value
23458	Eac3CodingModeCodingMode10 = "CODING_MODE_1_0"
23459
23460	// Eac3CodingModeCodingMode20 is a Eac3CodingMode enum value
23461	Eac3CodingModeCodingMode20 = "CODING_MODE_2_0"
23462
23463	// Eac3CodingModeCodingMode32 is a Eac3CodingMode enum value
23464	Eac3CodingModeCodingMode32 = "CODING_MODE_3_2"
23465)
23466
23467// Eac3CodingMode_Values returns all elements of the Eac3CodingMode enum
23468func Eac3CodingMode_Values() []string {
23469	return []string{
23470		Eac3CodingModeCodingMode10,
23471		Eac3CodingModeCodingMode20,
23472		Eac3CodingModeCodingMode32,
23473	}
23474}
23475
23476// Activates a DC highpass filter for all input channels.
23477const (
23478	// Eac3DcFilterEnabled is a Eac3DcFilter enum value
23479	Eac3DcFilterEnabled = "ENABLED"
23480
23481	// Eac3DcFilterDisabled is a Eac3DcFilter enum value
23482	Eac3DcFilterDisabled = "DISABLED"
23483)
23484
23485// Eac3DcFilter_Values returns all elements of the Eac3DcFilter enum
23486func Eac3DcFilter_Values() []string {
23487	return []string{
23488		Eac3DcFilterEnabled,
23489		Eac3DcFilterDisabled,
23490	}
23491}
23492
23493// Specify the absolute peak level for a signal with dynamic range compression.
23494const (
23495	// Eac3DynamicRangeCompressionLineNone is a Eac3DynamicRangeCompressionLine enum value
23496	Eac3DynamicRangeCompressionLineNone = "NONE"
23497
23498	// Eac3DynamicRangeCompressionLineFilmStandard is a Eac3DynamicRangeCompressionLine enum value
23499	Eac3DynamicRangeCompressionLineFilmStandard = "FILM_STANDARD"
23500
23501	// Eac3DynamicRangeCompressionLineFilmLight is a Eac3DynamicRangeCompressionLine enum value
23502	Eac3DynamicRangeCompressionLineFilmLight = "FILM_LIGHT"
23503
23504	// Eac3DynamicRangeCompressionLineMusicStandard is a Eac3DynamicRangeCompressionLine enum value
23505	Eac3DynamicRangeCompressionLineMusicStandard = "MUSIC_STANDARD"
23506
23507	// Eac3DynamicRangeCompressionLineMusicLight is a Eac3DynamicRangeCompressionLine enum value
23508	Eac3DynamicRangeCompressionLineMusicLight = "MUSIC_LIGHT"
23509
23510	// Eac3DynamicRangeCompressionLineSpeech is a Eac3DynamicRangeCompressionLine enum value
23511	Eac3DynamicRangeCompressionLineSpeech = "SPEECH"
23512)
23513
23514// Eac3DynamicRangeCompressionLine_Values returns all elements of the Eac3DynamicRangeCompressionLine enum
23515func Eac3DynamicRangeCompressionLine_Values() []string {
23516	return []string{
23517		Eac3DynamicRangeCompressionLineNone,
23518		Eac3DynamicRangeCompressionLineFilmStandard,
23519		Eac3DynamicRangeCompressionLineFilmLight,
23520		Eac3DynamicRangeCompressionLineMusicStandard,
23521		Eac3DynamicRangeCompressionLineMusicLight,
23522		Eac3DynamicRangeCompressionLineSpeech,
23523	}
23524}
23525
23526// Specify how the service limits the audio dynamic range when compressing the
23527// audio.
23528const (
23529	// Eac3DynamicRangeCompressionRfNone is a Eac3DynamicRangeCompressionRf enum value
23530	Eac3DynamicRangeCompressionRfNone = "NONE"
23531
23532	// Eac3DynamicRangeCompressionRfFilmStandard is a Eac3DynamicRangeCompressionRf enum value
23533	Eac3DynamicRangeCompressionRfFilmStandard = "FILM_STANDARD"
23534
23535	// Eac3DynamicRangeCompressionRfFilmLight is a Eac3DynamicRangeCompressionRf enum value
23536	Eac3DynamicRangeCompressionRfFilmLight = "FILM_LIGHT"
23537
23538	// Eac3DynamicRangeCompressionRfMusicStandard is a Eac3DynamicRangeCompressionRf enum value
23539	Eac3DynamicRangeCompressionRfMusicStandard = "MUSIC_STANDARD"
23540
23541	// Eac3DynamicRangeCompressionRfMusicLight is a Eac3DynamicRangeCompressionRf enum value
23542	Eac3DynamicRangeCompressionRfMusicLight = "MUSIC_LIGHT"
23543
23544	// Eac3DynamicRangeCompressionRfSpeech is a Eac3DynamicRangeCompressionRf enum value
23545	Eac3DynamicRangeCompressionRfSpeech = "SPEECH"
23546)
23547
23548// Eac3DynamicRangeCompressionRf_Values returns all elements of the Eac3DynamicRangeCompressionRf enum
23549func Eac3DynamicRangeCompressionRf_Values() []string {
23550	return []string{
23551		Eac3DynamicRangeCompressionRfNone,
23552		Eac3DynamicRangeCompressionRfFilmStandard,
23553		Eac3DynamicRangeCompressionRfFilmLight,
23554		Eac3DynamicRangeCompressionRfMusicStandard,
23555		Eac3DynamicRangeCompressionRfMusicLight,
23556		Eac3DynamicRangeCompressionRfSpeech,
23557	}
23558}
23559
23560// When encoding 3/2 audio, controls whether the LFE channel is enabled
23561const (
23562	// Eac3LfeControlLfe is a Eac3LfeControl enum value
23563	Eac3LfeControlLfe = "LFE"
23564
23565	// Eac3LfeControlNoLfe is a Eac3LfeControl enum value
23566	Eac3LfeControlNoLfe = "NO_LFE"
23567)
23568
23569// Eac3LfeControl_Values returns all elements of the Eac3LfeControl enum
23570func Eac3LfeControl_Values() []string {
23571	return []string{
23572		Eac3LfeControlLfe,
23573		Eac3LfeControlNoLfe,
23574	}
23575}
23576
23577// Applies a 120Hz lowpass filter to the LFE channel prior to encoding. Only
23578// valid with 3_2_LFE coding mode.
23579const (
23580	// Eac3LfeFilterEnabled is a Eac3LfeFilter enum value
23581	Eac3LfeFilterEnabled = "ENABLED"
23582
23583	// Eac3LfeFilterDisabled is a Eac3LfeFilter enum value
23584	Eac3LfeFilterDisabled = "DISABLED"
23585)
23586
23587// Eac3LfeFilter_Values returns all elements of the Eac3LfeFilter enum
23588func Eac3LfeFilter_Values() []string {
23589	return []string{
23590		Eac3LfeFilterEnabled,
23591		Eac3LfeFilterDisabled,
23592	}
23593}
23594
23595// When set to FOLLOW_INPUT, encoder metadata will be sourced from the DD, DD+,
23596// or DolbyE decoder that supplied this audio data. If audio was not supplied
23597// from one of these streams, then the static metadata settings will be used.
23598const (
23599	// Eac3MetadataControlFollowInput is a Eac3MetadataControl enum value
23600	Eac3MetadataControlFollowInput = "FOLLOW_INPUT"
23601
23602	// Eac3MetadataControlUseConfigured is a Eac3MetadataControl enum value
23603	Eac3MetadataControlUseConfigured = "USE_CONFIGURED"
23604)
23605
23606// Eac3MetadataControl_Values returns all elements of the Eac3MetadataControl enum
23607func Eac3MetadataControl_Values() []string {
23608	return []string{
23609		Eac3MetadataControlFollowInput,
23610		Eac3MetadataControlUseConfigured,
23611	}
23612}
23613
23614// When set to WHEN_POSSIBLE, input DD+ audio will be passed through if it is
// present on the input. This detection is dynamic over the life of the transcode.
23616// Inputs that alternate between DD+ and non-DD+ content will have a consistent
23617// DD+ output as the system alternates between passthrough and encoding.
23618const (
23619	// Eac3PassthroughControlWhenPossible is a Eac3PassthroughControl enum value
23620	Eac3PassthroughControlWhenPossible = "WHEN_POSSIBLE"
23621
23622	// Eac3PassthroughControlNoPassthrough is a Eac3PassthroughControl enum value
23623	Eac3PassthroughControlNoPassthrough = "NO_PASSTHROUGH"
23624)
23625
23626// Eac3PassthroughControl_Values returns all elements of the Eac3PassthroughControl enum
23627func Eac3PassthroughControl_Values() []string {
23628	return []string{
23629		Eac3PassthroughControlWhenPossible,
23630		Eac3PassthroughControlNoPassthrough,
23631	}
23632}
23633
23634// Controls the amount of phase-shift applied to the surround channels. Only
23635// used for 3/2 coding mode.
23636const (
23637	// Eac3PhaseControlShift90Degrees is a Eac3PhaseControl enum value
23638	Eac3PhaseControlShift90Degrees = "SHIFT_90_DEGREES"
23639
23640	// Eac3PhaseControlNoShift is a Eac3PhaseControl enum value
23641	Eac3PhaseControlNoShift = "NO_SHIFT"
23642)
23643
23644// Eac3PhaseControl_Values returns all elements of the Eac3PhaseControl enum
23645func Eac3PhaseControl_Values() []string {
23646	return []string{
23647		Eac3PhaseControlShift90Degrees,
23648		Eac3PhaseControlNoShift,
23649	}
23650}
23651
23652// Choose how the service does stereo downmixing. This setting only applies
23653// if you keep the default value of 3/2 - L, R, C, Ls, Rs (CODING_MODE_3_2)
23654// for the setting Coding mode (Eac3CodingMode). If you choose a different value
23655// for Coding mode, the service ignores Stereo downmix (Eac3StereoDownmix).
23656const (
23657	// Eac3StereoDownmixNotIndicated is a Eac3StereoDownmix enum value
23658	Eac3StereoDownmixNotIndicated = "NOT_INDICATED"
23659
23660	// Eac3StereoDownmixLoRo is a Eac3StereoDownmix enum value
23661	Eac3StereoDownmixLoRo = "LO_RO"
23662
23663	// Eac3StereoDownmixLtRt is a Eac3StereoDownmix enum value
23664	Eac3StereoDownmixLtRt = "LT_RT"
23665
23666	// Eac3StereoDownmixDpl2 is a Eac3StereoDownmix enum value
23667	Eac3StereoDownmixDpl2 = "DPL2"
23668)
23669
23670// Eac3StereoDownmix_Values returns all elements of the Eac3StereoDownmix enum
23671func Eac3StereoDownmix_Values() []string {
23672	return []string{
23673		Eac3StereoDownmixNotIndicated,
23674		Eac3StereoDownmixLoRo,
23675		Eac3StereoDownmixLtRt,
23676		Eac3StereoDownmixDpl2,
23677	}
23678}
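
// Illustrative sketch (not part of the generated API): per the note above, Stereo
// downmix only takes effect with the 3/2 coding mode, so the two settings are
// chosen together. The Eac3Settings field names are assumptions based on these
// enum names; verify them against the Eac3Settings struct in this package.
//
//    eac3 := &Eac3Settings{
//        CodingMode:    aws.String(Eac3CodingModeCodingMode32),
//        StereoDownmix: aws.String(Eac3StereoDownmixLtRt),
//    }
//    _ = eac3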
23679
23680// When encoding 3/2 audio, sets whether an extra center back surround channel
23681// is matrix encoded into the left and right surround channels.
23682const (
23683	// Eac3SurroundExModeNotIndicated is a Eac3SurroundExMode enum value
23684	Eac3SurroundExModeNotIndicated = "NOT_INDICATED"
23685
23686	// Eac3SurroundExModeEnabled is a Eac3SurroundExMode enum value
23687	Eac3SurroundExModeEnabled = "ENABLED"
23688
23689	// Eac3SurroundExModeDisabled is a Eac3SurroundExMode enum value
23690	Eac3SurroundExModeDisabled = "DISABLED"
23691)
23692
23693// Eac3SurroundExMode_Values returns all elements of the Eac3SurroundExMode enum
23694func Eac3SurroundExMode_Values() []string {
23695	return []string{
23696		Eac3SurroundExModeNotIndicated,
23697		Eac3SurroundExModeEnabled,
23698		Eac3SurroundExModeDisabled,
23699	}
23700}
23701
23702// When encoding 2/0 audio, sets whether Dolby Surround is matrix encoded into
23703// the two channels.
23704const (
23705	// Eac3SurroundModeNotIndicated is a Eac3SurroundMode enum value
23706	Eac3SurroundModeNotIndicated = "NOT_INDICATED"
23707
23708	// Eac3SurroundModeEnabled is a Eac3SurroundMode enum value
23709	Eac3SurroundModeEnabled = "ENABLED"
23710
23711	// Eac3SurroundModeDisabled is a Eac3SurroundMode enum value
23712	Eac3SurroundModeDisabled = "DISABLED"
23713)
23714
23715// Eac3SurroundMode_Values returns all elements of the Eac3SurroundMode enum
23716func Eac3SurroundMode_Values() []string {
23717	return []string{
23718		Eac3SurroundModeNotIndicated,
23719		Eac3SurroundModeEnabled,
23720		Eac3SurroundModeDisabled,
23721	}
23722}
23723
23724// Specify whether this set of input captions appears in your outputs in both
23725// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
23726// the captions data in two ways: it passes the 608 data through using the 608
23727// compatibility bytes fields of the 708 wrapper, and it also translates the
23728// 608 data into 708.
23729const (
23730	// EmbeddedConvert608To708Upconvert is a EmbeddedConvert608To708 enum value
23731	EmbeddedConvert608To708Upconvert = "UPCONVERT"
23732
23733	// EmbeddedConvert608To708Disabled is a EmbeddedConvert608To708 enum value
23734	EmbeddedConvert608To708Disabled = "DISABLED"
23735)
23736
23737// EmbeddedConvert608To708_Values returns all elements of the EmbeddedConvert608To708 enum
23738func EmbeddedConvert608To708_Values() []string {
23739	return []string{
23740		EmbeddedConvert608To708Upconvert,
23741		EmbeddedConvert608To708Disabled,
23742	}
23743}
23744
23745// By default, the service terminates any unterminated captions at the end of
// each input. If you want captions to continue onto your next input, disable
23747// this setting.
23748const (
23749	// EmbeddedTerminateCaptionsEndOfInput is a EmbeddedTerminateCaptions enum value
23750	EmbeddedTerminateCaptionsEndOfInput = "END_OF_INPUT"
23751
23752	// EmbeddedTerminateCaptionsDisabled is a EmbeddedTerminateCaptions enum value
23753	EmbeddedTerminateCaptionsDisabled = "DISABLED"
23754)
23755
23756// EmbeddedTerminateCaptions_Values returns all elements of the EmbeddedTerminateCaptions enum
23757func EmbeddedTerminateCaptions_Values() []string {
23758	return []string{
23759		EmbeddedTerminateCaptionsEndOfInput,
23760		EmbeddedTerminateCaptionsDisabled,
23761	}
23762}
23763
23764// If set to PROGRESSIVE_DOWNLOAD, the MOOV atom is relocated to the beginning
23765// of the archive as required for progressive downloading. Otherwise it is placed
23766// normally at the end.
23767const (
23768	// F4vMoovPlacementProgressiveDownload is a F4vMoovPlacement enum value
23769	F4vMoovPlacementProgressiveDownload = "PROGRESSIVE_DOWNLOAD"
23770
23771	// F4vMoovPlacementNormal is a F4vMoovPlacement enum value
23772	F4vMoovPlacementNormal = "NORMAL"
23773)
23774
23775// F4vMoovPlacement_Values returns all elements of the F4vMoovPlacement enum
23776func F4vMoovPlacement_Values() []string {
23777	return []string{
23778		F4vMoovPlacementProgressiveDownload,
23779		F4vMoovPlacementNormal,
23780	}
23781}
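
// Illustrative sketch (not part of the generated API): placing the MOOV atom at
// the front of an F4V output so playback can begin before the download finishes.
// The F4vSettings field name MoovPlacement is an assumption based on this enum
// name; verify it against the F4vSettings struct in this package.
//
//    f4v := &F4vSettings{
//        MoovPlacement: aws.String(F4vMoovPlacementProgressiveDownload),
//    }
//    _ = f4v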
23782
23783// Specify whether this set of input captions appears in your outputs in both
23784// 608 and 708 format. If you choose Upconvert (UPCONVERT), MediaConvert includes
23785// the captions data in two ways: it passes the 608 data through using the 608
23786// compatibility bytes fields of the 708 wrapper, and it also translates the
23787// 608 data into 708.
23788const (
23789	// FileSourceConvert608To708Upconvert is a FileSourceConvert608To708 enum value
23790	FileSourceConvert608To708Upconvert = "UPCONVERT"
23791
23792	// FileSourceConvert608To708Disabled is a FileSourceConvert608To708 enum value
23793	FileSourceConvert608To708Disabled = "DISABLED"
23794)
23795
23796// FileSourceConvert608To708_Values returns all elements of the FileSourceConvert608To708 enum
23797func FileSourceConvert608To708_Values() []string {
23798	return []string{
23799		FileSourceConvert608To708Upconvert,
23800		FileSourceConvert608To708Disabled,
23801	}
23802}
23803
23804// Provide the font script, using an ISO 15924 script code, if the LanguageCode
23805// is not sufficient for determining the script type. Where LanguageCode or
23806// CustomLanguageCode is sufficient, use "AUTOMATIC" or leave unset.
23807const (
23808	// FontScriptAutomatic is a FontScript enum value
23809	FontScriptAutomatic = "AUTOMATIC"
23810
23811	// FontScriptHans is a FontScript enum value
23812	FontScriptHans = "HANS"
23813
23814	// FontScriptHant is a FontScript enum value
23815	FontScriptHant = "HANT"
23816)
23817
23818// FontScript_Values returns all elements of the FontScript enum
23819func FontScript_Values() []string {
23820	return []string{
23821		FontScriptAutomatic,
23822		FontScriptHans,
23823		FontScriptHant,
23824	}
23825}
23826
23827// Keep the default value, Auto (AUTO), for this setting to have MediaConvert
23828// automatically apply the best types of quantization for your video content.
23829// When you want to apply your quantization settings manually, you must set
23830// H264AdaptiveQuantization to a value other than Auto (AUTO). Use this setting
23831// to specify the strength of any adaptive quantization filters that you enable.
23832// If you don't want MediaConvert to do any adaptive quantization in this transcode,
23833// set Adaptive quantization (H264AdaptiveQuantization) to Off (OFF). Related
23834// settings: The value that you choose here applies to the following settings:
23835// H264FlickerAdaptiveQuantization, H264SpatialAdaptiveQuantization, and H264TemporalAdaptiveQuantization.
23836const (
23837	// H264AdaptiveQuantizationOff is a H264AdaptiveQuantization enum value
23838	H264AdaptiveQuantizationOff = "OFF"
23839
23840	// H264AdaptiveQuantizationAuto is a H264AdaptiveQuantization enum value
23841	H264AdaptiveQuantizationAuto = "AUTO"
23842
23843	// H264AdaptiveQuantizationLow is a H264AdaptiveQuantization enum value
23844	H264AdaptiveQuantizationLow = "LOW"
23845
23846	// H264AdaptiveQuantizationMedium is a H264AdaptiveQuantization enum value
23847	H264AdaptiveQuantizationMedium = "MEDIUM"
23848
23849	// H264AdaptiveQuantizationHigh is a H264AdaptiveQuantization enum value
23850	H264AdaptiveQuantizationHigh = "HIGH"
23851
23852	// H264AdaptiveQuantizationHigher is a H264AdaptiveQuantization enum value
23853	H264AdaptiveQuantizationHigher = "HIGHER"
23854
23855	// H264AdaptiveQuantizationMax is a H264AdaptiveQuantization enum value
23856	H264AdaptiveQuantizationMax = "MAX"
23857)
23858
23859// H264AdaptiveQuantization_Values returns all elements of the H264AdaptiveQuantization enum
23860func H264AdaptiveQuantization_Values() []string {
23861	return []string{
23862		H264AdaptiveQuantizationOff,
23863		H264AdaptiveQuantizationAuto,
23864		H264AdaptiveQuantizationLow,
23865		H264AdaptiveQuantizationMedium,
23866		H264AdaptiveQuantizationHigh,
23867		H264AdaptiveQuantizationHigher,
23868		H264AdaptiveQuantizationMax,
23869	}
23870}
23871
23872// Specify an H.264 level that is consistent with your output video settings.
23873// If you aren't sure what level to specify, choose Auto (AUTO).
23874const (
23875	// H264CodecLevelAuto is a H264CodecLevel enum value
23876	H264CodecLevelAuto = "AUTO"
23877
23878	// H264CodecLevelLevel1 is a H264CodecLevel enum value
23879	H264CodecLevelLevel1 = "LEVEL_1"
23880
23881	// H264CodecLevelLevel11 is a H264CodecLevel enum value
23882	H264CodecLevelLevel11 = "LEVEL_1_1"
23883
23884	// H264CodecLevelLevel12 is a H264CodecLevel enum value
23885	H264CodecLevelLevel12 = "LEVEL_1_2"
23886
23887	// H264CodecLevelLevel13 is a H264CodecLevel enum value
23888	H264CodecLevelLevel13 = "LEVEL_1_3"
23889
23890	// H264CodecLevelLevel2 is a H264CodecLevel enum value
23891	H264CodecLevelLevel2 = "LEVEL_2"
23892
23893	// H264CodecLevelLevel21 is a H264CodecLevel enum value
23894	H264CodecLevelLevel21 = "LEVEL_2_1"
23895
23896	// H264CodecLevelLevel22 is a H264CodecLevel enum value
23897	H264CodecLevelLevel22 = "LEVEL_2_2"
23898
23899	// H264CodecLevelLevel3 is a H264CodecLevel enum value
23900	H264CodecLevelLevel3 = "LEVEL_3"
23901
23902	// H264CodecLevelLevel31 is a H264CodecLevel enum value
23903	H264CodecLevelLevel31 = "LEVEL_3_1"
23904
23905	// H264CodecLevelLevel32 is a H264CodecLevel enum value
23906	H264CodecLevelLevel32 = "LEVEL_3_2"
23907
23908	// H264CodecLevelLevel4 is a H264CodecLevel enum value
23909	H264CodecLevelLevel4 = "LEVEL_4"
23910
23911	// H264CodecLevelLevel41 is a H264CodecLevel enum value
23912	H264CodecLevelLevel41 = "LEVEL_4_1"
23913
23914	// H264CodecLevelLevel42 is a H264CodecLevel enum value
23915	H264CodecLevelLevel42 = "LEVEL_4_2"
23916
23917	// H264CodecLevelLevel5 is a H264CodecLevel enum value
23918	H264CodecLevelLevel5 = "LEVEL_5"
23919
23920	// H264CodecLevelLevel51 is a H264CodecLevel enum value
23921	H264CodecLevelLevel51 = "LEVEL_5_1"
23922
23923	// H264CodecLevelLevel52 is a H264CodecLevel enum value
23924	H264CodecLevelLevel52 = "LEVEL_5_2"
23925)
23926
23927// H264CodecLevel_Values returns all elements of the H264CodecLevel enum
23928func H264CodecLevel_Values() []string {
23929	return []string{
23930		H264CodecLevelAuto,
23931		H264CodecLevelLevel1,
23932		H264CodecLevelLevel11,
23933		H264CodecLevelLevel12,
23934		H264CodecLevelLevel13,
23935		H264CodecLevelLevel2,
23936		H264CodecLevelLevel21,
23937		H264CodecLevelLevel22,
23938		H264CodecLevelLevel3,
23939		H264CodecLevelLevel31,
23940		H264CodecLevelLevel32,
23941		H264CodecLevelLevel4,
23942		H264CodecLevelLevel41,
23943		H264CodecLevelLevel42,
23944		H264CodecLevelLevel5,
23945		H264CodecLevelLevel51,
23946		H264CodecLevelLevel52,
23947	}
23948}
23949
23950// H.264 Profile. High 4:2:2 and 10-bit profiles are only available with the
23951// AVC-I License.
23952const (
23953	// H264CodecProfileBaseline is a H264CodecProfile enum value
23954	H264CodecProfileBaseline = "BASELINE"
23955
23956	// H264CodecProfileHigh is a H264CodecProfile enum value
23957	H264CodecProfileHigh = "HIGH"
23958
23959	// H264CodecProfileHigh10bit is a H264CodecProfile enum value
23960	H264CodecProfileHigh10bit = "HIGH_10BIT"
23961
23962	// H264CodecProfileHigh422 is a H264CodecProfile enum value
23963	H264CodecProfileHigh422 = "HIGH_422"
23964
23965	// H264CodecProfileHigh42210bit is a H264CodecProfile enum value
23966	H264CodecProfileHigh42210bit = "HIGH_422_10BIT"
23967
23968	// H264CodecProfileMain is a H264CodecProfile enum value
23969	H264CodecProfileMain = "MAIN"
23970)
23971
23972// H264CodecProfile_Values returns all elements of the H264CodecProfile enum
23973func H264CodecProfile_Values() []string {
23974	return []string{
23975		H264CodecProfileBaseline,
23976		H264CodecProfileHigh,
23977		H264CodecProfileHigh10bit,
23978		H264CodecProfileHigh422,
23979		H264CodecProfileHigh42210bit,
23980		H264CodecProfileMain,
23981	}
23982}
23983
23984// Choose Adaptive to improve subjective video quality for high-motion content.
23985// This will cause the service to use fewer B-frames (which infer information
23986// based on other frames) for high-motion portions of the video and more B-frames
23987// for low-motion portions. The maximum number of B-frames is limited by the
23988// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
23989const (
23990	// H264DynamicSubGopAdaptive is a H264DynamicSubGop enum value
23991	H264DynamicSubGopAdaptive = "ADAPTIVE"
23992
23993	// H264DynamicSubGopStatic is a H264DynamicSubGop enum value
23994	H264DynamicSubGopStatic = "STATIC"
23995)
23996
23997// H264DynamicSubGop_Values returns all elements of the H264DynamicSubGop enum
23998func H264DynamicSubGop_Values() []string {
23999	return []string{
24000		H264DynamicSubGopAdaptive,
24001		H264DynamicSubGopStatic,
24002	}
24003}
24004
24005// Entropy encoding mode. Use CABAC (must be in Main or High profile) or CAVLC.
24006const (
24007	// H264EntropyEncodingCabac is a H264EntropyEncoding enum value
24008	H264EntropyEncodingCabac = "CABAC"
24009
24010	// H264EntropyEncodingCavlc is a H264EntropyEncoding enum value
24011	H264EntropyEncodingCavlc = "CAVLC"
24012)
24013
24014// H264EntropyEncoding_Values returns all elements of the H264EntropyEncoding enum
24015func H264EntropyEncoding_Values() []string {
24016	return []string{
24017		H264EntropyEncodingCabac,
24018		H264EntropyEncodingCavlc,
24019	}
24020}
24021
24022// Keep the default value, PAFF, to have MediaConvert use PAFF encoding for
24023// interlaced outputs. Choose Force field (FORCE_FIELD) to disable PAFF encoding
24024// and create separate interlaced fields.
24025const (
24026	// H264FieldEncodingPaff is a H264FieldEncoding enum value
24027	H264FieldEncodingPaff = "PAFF"
24028
24029	// H264FieldEncodingForceField is a H264FieldEncoding enum value
24030	H264FieldEncodingForceField = "FORCE_FIELD"
24031)
24032
24033// H264FieldEncoding_Values returns all elements of the H264FieldEncoding enum
24034func H264FieldEncoding_Values() []string {
24035	return []string{
24036		H264FieldEncodingPaff,
24037		H264FieldEncodingForceField,
24038	}
24039}
24040
24041// Only use this setting when you change the default value, AUTO, for the setting
24042// H264AdaptiveQuantization. When you keep all defaults, excluding H264AdaptiveQuantization
24043// and all other adaptive quantization from your JSON job specification, MediaConvert
24044// automatically applies the best types of quantization for your video content.
24045// When you set H264AdaptiveQuantization to a value other than AUTO, the default
24046// value for H264FlickerAdaptiveQuantization is Disabled (DISABLED). Change
24047// this value to Enabled (ENABLED) to reduce I-frame pop. I-frame pop appears
24048// as a visual flicker that can arise when the encoder saves bits by copying
24049// some macroblocks many times from frame to frame, and then refreshes them
24050// at the I-frame. When you enable this setting, the encoder updates these macroblocks
24051// slightly more often to smooth out the flicker. To manually enable or disable
24052// H264FlickerAdaptiveQuantization, you must set Adaptive quantization (H264AdaptiveQuantization)
24053// to a value other than AUTO.
24054const (
24055	// H264FlickerAdaptiveQuantizationDisabled is a H264FlickerAdaptiveQuantization enum value
24056	H264FlickerAdaptiveQuantizationDisabled = "DISABLED"
24057
24058	// H264FlickerAdaptiveQuantizationEnabled is a H264FlickerAdaptiveQuantization enum value
24059	H264FlickerAdaptiveQuantizationEnabled = "ENABLED"
24060)
24061
24062// H264FlickerAdaptiveQuantization_Values returns all elements of the H264FlickerAdaptiveQuantization enum
24063func H264FlickerAdaptiveQuantization_Values() []string {
24064	return []string{
24065		H264FlickerAdaptiveQuantizationDisabled,
24066		H264FlickerAdaptiveQuantizationEnabled,
24067	}
24068}
24069
24070// If you are using the console, use the Framerate setting to specify the frame
24071// rate for this output. If you want to keep the same frame rate as the input
24072// video, choose Follow source. If you want to do frame rate conversion, choose
24073// a frame rate from the dropdown list or choose Custom. The framerates shown
24074// in the dropdown list are decimal approximations of fractions. If you choose
24075// Custom, specify your frame rate as a fraction. If you are creating your transcoding
24076// job specification as a JSON file without the console, use FramerateControl
24077// to specify which value the service uses for the frame rate for this output.
24078// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
24079// from the input. Choose SPECIFIED if you want the service to use the frame
24080// rate you specify in the settings FramerateNumerator and FramerateDenominator.
24081const (
24082	// H264FramerateControlInitializeFromSource is a H264FramerateControl enum value
24083	H264FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
24084
24085	// H264FramerateControlSpecified is a H264FramerateControl enum value
24086	H264FramerateControlSpecified = "SPECIFIED"
24087)
24088
24089// H264FramerateControl_Values returns all elements of the H264FramerateControl enum
24090func H264FramerateControl_Values() []string {
24091	return []string{
24092		H264FramerateControlInitializeFromSource,
24093		H264FramerateControlSpecified,
24094	}
24095}
24096
24097// Choose the method that you want MediaConvert to use when increasing or decreasing
24098// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
24099// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
24100// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
24101// smooth picture, but might introduce undesirable video artifacts. For complex
24102// frame rate conversions, especially if your source video has already been
24103// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
24104// motion-compensated interpolation. FrameFormer chooses the best conversion
24105// method frame by frame. Note that using FrameFormer increases the transcoding
24106// time and incurs a significant add-on cost.
24107const (
24108	// H264FramerateConversionAlgorithmDuplicateDrop is a H264FramerateConversionAlgorithm enum value
24109	H264FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
24110
24111	// H264FramerateConversionAlgorithmInterpolate is a H264FramerateConversionAlgorithm enum value
24112	H264FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
24113
24114	// H264FramerateConversionAlgorithmFrameformer is a H264FramerateConversionAlgorithm enum value
24115	H264FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
24116)
24117
24118// H264FramerateConversionAlgorithm_Values returns all elements of the H264FramerateConversionAlgorithm enum
24119func H264FramerateConversionAlgorithm_Values() []string {
24120	return []string{
24121		H264FramerateConversionAlgorithmDuplicateDrop,
24122		H264FramerateConversionAlgorithmInterpolate,
24123		H264FramerateConversionAlgorithmFrameformer,
24124	}
24125}
24126
// If enabled, use reference B-frames for GOP structures that have more than
// 1 B-frame.
24129const (
24130	// H264GopBReferenceDisabled is a H264GopBReference enum value
24131	H264GopBReferenceDisabled = "DISABLED"
24132
24133	// H264GopBReferenceEnabled is a H264GopBReference enum value
24134	H264GopBReferenceEnabled = "ENABLED"
24135)
24136
24137// H264GopBReference_Values returns all elements of the H264GopBReference enum
24138func H264GopBReference_Values() []string {
24139	return []string{
24140		H264GopBReferenceDisabled,
24141		H264GopBReferenceEnabled,
24142	}
24143}
24144
// Indicates if the GOP Size in H264 is specified in frames or seconds. If seconds,
// the system will convert the GOP Size into a frame count at run time.
24147const (
24148	// H264GopSizeUnitsFrames is a H264GopSizeUnits enum value
24149	H264GopSizeUnitsFrames = "FRAMES"
24150
24151	// H264GopSizeUnitsSeconds is a H264GopSizeUnits enum value
24152	H264GopSizeUnitsSeconds = "SECONDS"
24153)
24154
24155// H264GopSizeUnits_Values returns all elements of the H264GopSizeUnits enum
24156func H264GopSizeUnits_Values() []string {
24157	return []string{
24158		H264GopSizeUnitsFrames,
24159		H264GopSizeUnitsSeconds,
24160	}
24161}
24162
24163// Choose the scan line type for the output. Keep the default value, Progressive
24164// (PROGRESSIVE) to create a progressive output, regardless of the scan type
24165// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
24166// to create an output that's interlaced with the same field polarity throughout.
24167// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
24168// to produce outputs with the same field polarity as the source. For jobs that
24169// have multiple inputs, the output field polarity might change over the course
24170// of the output. Follow behavior depends on the input scan type. If the source
24171// is interlaced, the output will be interlaced with the same polarity as the
// source. If the source is progressive, the output will be interlaced with
// top field first or bottom field first, depending on which of the Follow
// options you choose.
24175const (
24176	// H264InterlaceModeProgressive is a H264InterlaceMode enum value
24177	H264InterlaceModeProgressive = "PROGRESSIVE"
24178
24179	// H264InterlaceModeTopField is a H264InterlaceMode enum value
24180	H264InterlaceModeTopField = "TOP_FIELD"
24181
24182	// H264InterlaceModeBottomField is a H264InterlaceMode enum value
24183	H264InterlaceModeBottomField = "BOTTOM_FIELD"
24184
24185	// H264InterlaceModeFollowTopField is a H264InterlaceMode enum value
24186	H264InterlaceModeFollowTopField = "FOLLOW_TOP_FIELD"
24187
24188	// H264InterlaceModeFollowBottomField is a H264InterlaceMode enum value
24189	H264InterlaceModeFollowBottomField = "FOLLOW_BOTTOM_FIELD"
24190)
24191
24192// H264InterlaceMode_Values returns all elements of the H264InterlaceMode enum
24193func H264InterlaceMode_Values() []string {
24194	return []string{
24195		H264InterlaceModeProgressive,
24196		H264InterlaceModeTopField,
24197		H264InterlaceModeBottomField,
24198		H264InterlaceModeFollowTopField,
24199		H264InterlaceModeFollowBottomField,
24200	}
24201}
24202
24203// Optional. Specify how the service determines the pixel aspect ratio (PAR)
24204// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
24205// uses the PAR from your input video for your output. To specify a different
24206// PAR in the console, choose any value other than Follow source. To specify
24207// a different PAR by editing the JSON job specification, choose SPECIFIED.
24208// When you choose SPECIFIED for this setting, you must also specify values
24209// for the parNumerator and parDenominator settings.
24210const (
24211	// H264ParControlInitializeFromSource is a H264ParControl enum value
24212	H264ParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
24213
24214	// H264ParControlSpecified is a H264ParControl enum value
24215	H264ParControlSpecified = "SPECIFIED"
24216)
24217
24218// H264ParControl_Values returns all elements of the H264ParControl enum
24219func H264ParControl_Values() []string {
24220	return []string{
24221		H264ParControlInitializeFromSource,
24222		H264ParControlSpecified,
24223	}
24224}
24225
24226// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
24227// want to trade off encoding speed for output video quality. The default behavior
24228// is faster, lower quality, single-pass encoding.
24229const (
24230	// H264QualityTuningLevelSinglePass is a H264QualityTuningLevel enum value
24231	H264QualityTuningLevelSinglePass = "SINGLE_PASS"
24232
24233	// H264QualityTuningLevelSinglePassHq is a H264QualityTuningLevel enum value
24234	H264QualityTuningLevelSinglePassHq = "SINGLE_PASS_HQ"
24235
24236	// H264QualityTuningLevelMultiPassHq is a H264QualityTuningLevel enum value
24237	H264QualityTuningLevelMultiPassHq = "MULTI_PASS_HQ"
24238)
24239
24240// H264QualityTuningLevel_Values returns all elements of the H264QualityTuningLevel enum
24241func H264QualityTuningLevel_Values() []string {
24242	return []string{
24243		H264QualityTuningLevelSinglePass,
24244		H264QualityTuningLevelSinglePassHq,
24245		H264QualityTuningLevelMultiPassHq,
24246	}
24247}
24248
24249// Use this setting to specify whether this output has a variable bitrate (VBR),
24250// constant bitrate (CBR) or quality-defined variable bitrate (QVBR).
24251const (
24252	// H264RateControlModeVbr is a H264RateControlMode enum value
24253	H264RateControlModeVbr = "VBR"
24254
24255	// H264RateControlModeCbr is a H264RateControlMode enum value
24256	H264RateControlModeCbr = "CBR"
24257
24258	// H264RateControlModeQvbr is a H264RateControlMode enum value
24259	H264RateControlModeQvbr = "QVBR"
24260)
24261
24262// H264RateControlMode_Values returns all elements of the H264RateControlMode enum
24263func H264RateControlMode_Values() []string {
24264	return []string{
24265		H264RateControlModeVbr,
24266		H264RateControlModeCbr,
24267		H264RateControlModeQvbr,
24268	}
24269}
24270
24271// Places a PPS header on each encoded picture, even if repeated.
24272const (
24273	// H264RepeatPpsDisabled is a H264RepeatPps enum value
24274	H264RepeatPpsDisabled = "DISABLED"
24275
24276	// H264RepeatPpsEnabled is a H264RepeatPps enum value
24277	H264RepeatPpsEnabled = "ENABLED"
24278)
24279
24280// H264RepeatPps_Values returns all elements of the H264RepeatPps enum
24281func H264RepeatPps_Values() []string {
24282	return []string{
24283		H264RepeatPpsDisabled,
24284		H264RepeatPpsEnabled,
24285	}
24286}
24287
24288// Use this setting for interlaced outputs, when your output frame rate is half
24289// of your input frame rate. In this situation, choose Optimized interlacing
24290// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
24291// case, each progressive frame from the input corresponds to an interlaced
24292// field in the output. Keep the default value, Basic interlacing (INTERLACED),
24293// for all other output frame rates. With basic interlacing, MediaConvert performs
24294// any frame rate conversion first and then interlaces the frames. When you
24295// choose Optimized interlacing and you set your output frame rate to a value
24296// that isn't suitable for optimized interlacing, MediaConvert automatically
24297// falls back to basic interlacing. Required settings: To use optimized interlacing,
24298// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
24299// use optimized interlacing for hard telecine outputs. You must also set Interlace
24300// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
24301const (
24302	// H264ScanTypeConversionModeInterlaced is a H264ScanTypeConversionMode enum value
24303	H264ScanTypeConversionModeInterlaced = "INTERLACED"
24304
24305	// H264ScanTypeConversionModeInterlacedOptimize is a H264ScanTypeConversionMode enum value
24306	H264ScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
24307)
24308
24309// H264ScanTypeConversionMode_Values returns all elements of the H264ScanTypeConversionMode enum
24310func H264ScanTypeConversionMode_Values() []string {
24311	return []string{
24312		H264ScanTypeConversionModeInterlaced,
24313		H264ScanTypeConversionModeInterlacedOptimize,
24314	}
24315}
24316
24317// Enable this setting to insert I-frames at scene changes that the service
24318// automatically detects. This improves video quality and is enabled by default.
24319// If this output uses QVBR, choose Transition detection (TRANSITION_DETECTION)
24320// for further video quality improvement. For more information about QVBR, see
24321// https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
24322const (
24323	// H264SceneChangeDetectDisabled is a H264SceneChangeDetect enum value
24324	H264SceneChangeDetectDisabled = "DISABLED"
24325
24326	// H264SceneChangeDetectEnabled is a H264SceneChangeDetect enum value
24327	H264SceneChangeDetectEnabled = "ENABLED"
24328
24329	// H264SceneChangeDetectTransitionDetection is a H264SceneChangeDetect enum value
24330	H264SceneChangeDetectTransitionDetection = "TRANSITION_DETECTION"
24331)
24332
24333// H264SceneChangeDetect_Values returns all elements of the H264SceneChangeDetect enum
24334func H264SceneChangeDetect_Values() []string {
24335	return []string{
24336		H264SceneChangeDetectDisabled,
24337		H264SceneChangeDetectEnabled,
24338		H264SceneChangeDetectTransitionDetection,
24339	}
24340}
24341
24342// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
24343// second (fps). Enable slow PAL to create a 25 fps output. When you enable
24344// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
24345// your audio to keep it synchronized with the video. Note that enabling this
24346// setting will slightly reduce the duration of your video. Required settings:
24347// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
24348// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
24349// 1.
24350const (
24351	// H264SlowPalDisabled is a H264SlowPal enum value
24352	H264SlowPalDisabled = "DISABLED"
24353
24354	// H264SlowPalEnabled is a H264SlowPal enum value
24355	H264SlowPalEnabled = "ENABLED"
24356)
24357
24358// H264SlowPal_Values returns all elements of the H264SlowPal enum
24359func H264SlowPal_Values() []string {
24360	return []string{
24361		H264SlowPalDisabled,
24362		H264SlowPalEnabled,
24363	}
24364}
24365
24366// Only use this setting when you change the default value, Auto (AUTO), for
24367// the setting H264AdaptiveQuantization. When you keep all defaults, excluding
24368// H264AdaptiveQuantization and all other adaptive quantization from your JSON
24369// job specification, MediaConvert automatically applies the best types of quantization
24370// for your video content. When you set H264AdaptiveQuantization to a value
24371// other than AUTO, the default value for H264SpatialAdaptiveQuantization is
24372// Enabled (ENABLED). Keep this default value to adjust quantization within
24373// each frame based on spatial variation of content complexity. When you enable
24374// this feature, the encoder uses fewer bits on areas that can sustain more
24375// distortion with no noticeable visual degradation and uses more bits on areas
24376// where any small distortion will be noticeable. For example, complex textured
24377// blocks are encoded with fewer bits and smooth textured blocks are encoded
24378// with more bits. Enabling this feature will almost always improve your video
24379// quality. Note, though, that this feature doesn't take into account where
24380// the viewer's attention is likely to be. If viewers are likely to be focusing
24381// their attention on a part of the screen with a lot of complex texture, you
24382// might choose to set H264SpatialAdaptiveQuantization to Disabled (DISABLED).
24383// Related setting: When you enable spatial adaptive quantization, set the value
24384// for Adaptive quantization (H264AdaptiveQuantization) depending on your content.
24385// For homogeneous content, such as cartoons and video games, set it to Low.
24386// For content with a wider variety of textures, set it to High or Higher. To
24387// manually enable or disable H264SpatialAdaptiveQuantization, you must set
24388// Adaptive quantization (H264AdaptiveQuantization) to a value other than AUTO.
24389const (
24390	// H264SpatialAdaptiveQuantizationDisabled is a H264SpatialAdaptiveQuantization enum value
24391	H264SpatialAdaptiveQuantizationDisabled = "DISABLED"
24392
24393	// H264SpatialAdaptiveQuantizationEnabled is a H264SpatialAdaptiveQuantization enum value
24394	H264SpatialAdaptiveQuantizationEnabled = "ENABLED"
24395)
24396
24397// H264SpatialAdaptiveQuantization_Values returns all elements of the H264SpatialAdaptiveQuantization enum
24398func H264SpatialAdaptiveQuantization_Values() []string {
24399	return []string{
24400		H264SpatialAdaptiveQuantizationDisabled,
24401		H264SpatialAdaptiveQuantizationEnabled,
24402	}
24403}
24404
24405// Produces a bitstream compliant with SMPTE RP-2027.
24406const (
24407	// H264SyntaxDefault is a H264Syntax enum value
24408	H264SyntaxDefault = "DEFAULT"
24409
24410	// H264SyntaxRp2027 is a H264Syntax enum value
24411	H264SyntaxRp2027 = "RP2027"
24412)
24413
24414// H264Syntax_Values returns all elements of the H264Syntax enum
24415func H264Syntax_Values() []string {
24416	return []string{
24417		H264SyntaxDefault,
24418		H264SyntaxRp2027,
24419	}
24420}
24421
24422// When you do frame rate conversion from 23.976 frames per second (fps) to
24423// 29.97 fps, and your output scan type is interlaced, you can optionally enable
24424// hard or soft telecine to create a smoother picture. Hard telecine (HARD)
// produces a 29.97i output. Soft telecine (SOFT) produces a 23.976 output that
// signals the video player device to do the conversion during playback. When
// you keep the default value, None (NONE), MediaConvert does a standard frame
// rate conversion to 29.97 without manipulating the field polarity to create
// a smoother picture.
24430const (
24431	// H264TelecineNone is a H264Telecine enum value
24432	H264TelecineNone = "NONE"
24433
24434	// H264TelecineSoft is a H264Telecine enum value
24435	H264TelecineSoft = "SOFT"
24436
24437	// H264TelecineHard is a H264Telecine enum value
24438	H264TelecineHard = "HARD"
24439)
24440
24441// H264Telecine_Values returns all elements of the H264Telecine enum
24442func H264Telecine_Values() []string {
24443	return []string{
24444		H264TelecineNone,
24445		H264TelecineSoft,
24446		H264TelecineHard,
24447	}
24448}
24449
24450// Only use this setting when you change the default value, AUTO, for the setting
24451// H264AdaptiveQuantization. When you keep all defaults, excluding H264AdaptiveQuantization
24452// and all other adaptive quantization from your JSON job specification, MediaConvert
24453// automatically applies the best types of quantization for your video content.
24454// When you set H264AdaptiveQuantization to a value other than AUTO, the default
24455// value for H264TemporalAdaptiveQuantization is Enabled (ENABLED). Keep this
24456// default value to adjust quantization within each frame based on temporal
24457// variation of content complexity. When you enable this feature, the encoder
24458// uses fewer bits on areas of the frame that aren't moving and uses more bits
24459// on complex objects with sharp edges that move a lot. For example, this feature
24460// improves the readability of text tickers on newscasts and scoreboards on
24461// sports matches. Enabling this feature will almost always improve your video
24462// quality. Note, though, that this feature doesn't take into account where
24463// the viewer's attention is likely to be. If viewers are likely to be focusing
24464// their attention on a part of the screen that doesn't have moving objects
24465// with sharp edges, such as sports athletes' faces, you might choose to set
24466// H264TemporalAdaptiveQuantization to Disabled (DISABLED). Related setting:
24467// When you enable temporal quantization, adjust the strength of the filter
24468// with the setting Adaptive quantization (adaptiveQuantization). To manually
24469// enable or disable H264TemporalAdaptiveQuantization, you must set Adaptive
24470// quantization (H264AdaptiveQuantization) to a value other than AUTO.
24471const (
24472	// H264TemporalAdaptiveQuantizationDisabled is a H264TemporalAdaptiveQuantization enum value
24473	H264TemporalAdaptiveQuantizationDisabled = "DISABLED"
24474
24475	// H264TemporalAdaptiveQuantizationEnabled is a H264TemporalAdaptiveQuantization enum value
24476	H264TemporalAdaptiveQuantizationEnabled = "ENABLED"
24477)
24478
24479// H264TemporalAdaptiveQuantization_Values returns all elements of the H264TemporalAdaptiveQuantization enum
24480func H264TemporalAdaptiveQuantization_Values() []string {
24481	return []string{
24482		H264TemporalAdaptiveQuantizationDisabled,
24483		H264TemporalAdaptiveQuantizationEnabled,
24484	}
24485}
24486
24487// Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
24488const (
24489	// H264UnregisteredSeiTimecodeDisabled is a H264UnregisteredSeiTimecode enum value
24490	H264UnregisteredSeiTimecodeDisabled = "DISABLED"
24491
24492	// H264UnregisteredSeiTimecodeEnabled is a H264UnregisteredSeiTimecode enum value
24493	H264UnregisteredSeiTimecodeEnabled = "ENABLED"
24494)
24495
24496// H264UnregisteredSeiTimecode_Values returns all elements of the H264UnregisteredSeiTimecode enum
24497func H264UnregisteredSeiTimecode_Values() []string {
24498	return []string{
24499		H264UnregisteredSeiTimecodeDisabled,
24500		H264UnregisteredSeiTimecodeEnabled,
24501	}
24502}
24503
24504// Specify the strength of any adaptive quantization filters that you enable.
24505// The value that you choose here applies to the following settings: Flicker
24506// adaptive quantization (flickerAdaptiveQuantization), Spatial adaptive quantization
24507// (spatialAdaptiveQuantization), and Temporal adaptive quantization (temporalAdaptiveQuantization).
24508const (
24509	// H265AdaptiveQuantizationOff is a H265AdaptiveQuantization enum value
24510	H265AdaptiveQuantizationOff = "OFF"
24511
24512	// H265AdaptiveQuantizationLow is a H265AdaptiveQuantization enum value
24513	H265AdaptiveQuantizationLow = "LOW"
24514
24515	// H265AdaptiveQuantizationMedium is a H265AdaptiveQuantization enum value
24516	H265AdaptiveQuantizationMedium = "MEDIUM"
24517
24518	// H265AdaptiveQuantizationHigh is a H265AdaptiveQuantization enum value
24519	H265AdaptiveQuantizationHigh = "HIGH"
24520
24521	// H265AdaptiveQuantizationHigher is a H265AdaptiveQuantization enum value
24522	H265AdaptiveQuantizationHigher = "HIGHER"
24523
24524	// H265AdaptiveQuantizationMax is a H265AdaptiveQuantization enum value
24525	H265AdaptiveQuantizationMax = "MAX"
24526)
24527
24528// H265AdaptiveQuantization_Values returns all elements of the H265AdaptiveQuantization enum
24529func H265AdaptiveQuantization_Values() []string {
24530	return []string{
24531		H265AdaptiveQuantizationOff,
24532		H265AdaptiveQuantizationLow,
24533		H265AdaptiveQuantizationMedium,
24534		H265AdaptiveQuantizationHigh,
24535		H265AdaptiveQuantizationHigher,
24536		H265AdaptiveQuantizationMax,
24537	}
24538}
24539
24540// Enables Alternate Transfer Function SEI message for outputs using Hybrid
24541// Log Gamma (HLG) Electro-Optical Transfer Function (EOTF).
24542const (
24543	// H265AlternateTransferFunctionSeiDisabled is a H265AlternateTransferFunctionSei enum value
24544	H265AlternateTransferFunctionSeiDisabled = "DISABLED"
24545
24546	// H265AlternateTransferFunctionSeiEnabled is a H265AlternateTransferFunctionSei enum value
24547	H265AlternateTransferFunctionSeiEnabled = "ENABLED"
24548)
24549
24550// H265AlternateTransferFunctionSei_Values returns all elements of the H265AlternateTransferFunctionSei enum
24551func H265AlternateTransferFunctionSei_Values() []string {
24552	return []string{
24553		H265AlternateTransferFunctionSeiDisabled,
24554		H265AlternateTransferFunctionSeiEnabled,
24555	}
24556}
24557
24558// H.265 Level.
24559const (
24560	// H265CodecLevelAuto is a H265CodecLevel enum value
24561	H265CodecLevelAuto = "AUTO"
24562
24563	// H265CodecLevelLevel1 is a H265CodecLevel enum value
24564	H265CodecLevelLevel1 = "LEVEL_1"
24565
24566	// H265CodecLevelLevel2 is a H265CodecLevel enum value
24567	H265CodecLevelLevel2 = "LEVEL_2"
24568
24569	// H265CodecLevelLevel21 is a H265CodecLevel enum value
24570	H265CodecLevelLevel21 = "LEVEL_2_1"
24571
24572	// H265CodecLevelLevel3 is a H265CodecLevel enum value
24573	H265CodecLevelLevel3 = "LEVEL_3"
24574
24575	// H265CodecLevelLevel31 is a H265CodecLevel enum value
24576	H265CodecLevelLevel31 = "LEVEL_3_1"
24577
24578	// H265CodecLevelLevel4 is a H265CodecLevel enum value
24579	H265CodecLevelLevel4 = "LEVEL_4"
24580
24581	// H265CodecLevelLevel41 is a H265CodecLevel enum value
24582	H265CodecLevelLevel41 = "LEVEL_4_1"
24583
24584	// H265CodecLevelLevel5 is a H265CodecLevel enum value
24585	H265CodecLevelLevel5 = "LEVEL_5"
24586
24587	// H265CodecLevelLevel51 is a H265CodecLevel enum value
24588	H265CodecLevelLevel51 = "LEVEL_5_1"
24589
24590	// H265CodecLevelLevel52 is a H265CodecLevel enum value
24591	H265CodecLevelLevel52 = "LEVEL_5_2"
24592
24593	// H265CodecLevelLevel6 is a H265CodecLevel enum value
24594	H265CodecLevelLevel6 = "LEVEL_6"
24595
24596	// H265CodecLevelLevel61 is a H265CodecLevel enum value
24597	H265CodecLevelLevel61 = "LEVEL_6_1"
24598
24599	// H265CodecLevelLevel62 is a H265CodecLevel enum value
24600	H265CodecLevelLevel62 = "LEVEL_6_2"
24601)
24602
24603// H265CodecLevel_Values returns all elements of the H265CodecLevel enum
24604func H265CodecLevel_Values() []string {
24605	return []string{
24606		H265CodecLevelAuto,
24607		H265CodecLevelLevel1,
24608		H265CodecLevelLevel2,
24609		H265CodecLevelLevel21,
24610		H265CodecLevelLevel3,
24611		H265CodecLevelLevel31,
24612		H265CodecLevelLevel4,
24613		H265CodecLevelLevel41,
24614		H265CodecLevelLevel5,
24615		H265CodecLevelLevel51,
24616		H265CodecLevelLevel52,
24617		H265CodecLevelLevel6,
24618		H265CodecLevelLevel61,
24619		H265CodecLevelLevel62,
24620	}
24621}
24622
24623// Represents the Profile and Tier, per the HEVC (H.265) specification. Selections
24624// are grouped as [Profile] / [Tier], so "Main/High" represents Main Profile
24625// with High Tier. 4:2:2 profiles are only available with the HEVC 4:2:2 License.
24626const (
24627	// H265CodecProfileMainMain is a H265CodecProfile enum value
24628	H265CodecProfileMainMain = "MAIN_MAIN"
24629
24630	// H265CodecProfileMainHigh is a H265CodecProfile enum value
24631	H265CodecProfileMainHigh = "MAIN_HIGH"
24632
24633	// H265CodecProfileMain10Main is a H265CodecProfile enum value
24634	H265CodecProfileMain10Main = "MAIN10_MAIN"
24635
24636	// H265CodecProfileMain10High is a H265CodecProfile enum value
24637	H265CodecProfileMain10High = "MAIN10_HIGH"
24638
24639	// H265CodecProfileMain4228bitMain is a H265CodecProfile enum value
24640	H265CodecProfileMain4228bitMain = "MAIN_422_8BIT_MAIN"
24641
24642	// H265CodecProfileMain4228bitHigh is a H265CodecProfile enum value
24643	H265CodecProfileMain4228bitHigh = "MAIN_422_8BIT_HIGH"
24644
24645	// H265CodecProfileMain42210bitMain is a H265CodecProfile enum value
24646	H265CodecProfileMain42210bitMain = "MAIN_422_10BIT_MAIN"
24647
24648	// H265CodecProfileMain42210bitHigh is a H265CodecProfile enum value
24649	H265CodecProfileMain42210bitHigh = "MAIN_422_10BIT_HIGH"
24650)
24651
24652// H265CodecProfile_Values returns all elements of the H265CodecProfile enum
24653func H265CodecProfile_Values() []string {
24654	return []string{
24655		H265CodecProfileMainMain,
24656		H265CodecProfileMainHigh,
24657		H265CodecProfileMain10Main,
24658		H265CodecProfileMain10High,
24659		H265CodecProfileMain4228bitMain,
24660		H265CodecProfileMain4228bitHigh,
24661		H265CodecProfileMain42210bitMain,
24662		H265CodecProfileMain42210bitHigh,
24663	}
24664}
24665
24666// Choose Adaptive to improve subjective video quality for high-motion content.
24667// This will cause the service to use fewer B-frames (which infer information
24668// based on other frames) for high-motion portions of the video and more B-frames
24669// for low-motion portions. The maximum number of B-frames is limited by the
24670// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
24671const (
24672	// H265DynamicSubGopAdaptive is a H265DynamicSubGop enum value
24673	H265DynamicSubGopAdaptive = "ADAPTIVE"
24674
24675	// H265DynamicSubGopStatic is a H265DynamicSubGop enum value
24676	H265DynamicSubGopStatic = "STATIC"
24677)
24678
24679// H265DynamicSubGop_Values returns all elements of the H265DynamicSubGop enum
24680func H265DynamicSubGop_Values() []string {
24681	return []string{
24682		H265DynamicSubGopAdaptive,
24683		H265DynamicSubGopStatic,
24684	}
24685}
24686
24687// Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears
24688// as a visual flicker that can arise when the encoder saves bits by copying
24689// some macroblocks many times from frame to frame, and then refreshes them
24690// at the I-frame. When you enable this setting, the encoder updates these macroblocks
24691// slightly more often to smooth out the flicker. This setting is disabled by
24692// default. Related setting: In addition to enabling this setting, you must
24693// also set adaptiveQuantization to a value other than Off (OFF).
24694const (
24695	// H265FlickerAdaptiveQuantizationDisabled is a H265FlickerAdaptiveQuantization enum value
24696	H265FlickerAdaptiveQuantizationDisabled = "DISABLED"
24697
24698	// H265FlickerAdaptiveQuantizationEnabled is a H265FlickerAdaptiveQuantization enum value
24699	H265FlickerAdaptiveQuantizationEnabled = "ENABLED"
24700)
24701
24702// H265FlickerAdaptiveQuantization_Values returns all elements of the H265FlickerAdaptiveQuantization enum
24703func H265FlickerAdaptiveQuantization_Values() []string {
24704	return []string{
24705		H265FlickerAdaptiveQuantizationDisabled,
24706		H265FlickerAdaptiveQuantizationEnabled,
24707	}
24708}
24709
24710// If you are using the console, use the Framerate setting to specify the frame
24711// rate for this output. If you want to keep the same frame rate as the input
24712// video, choose Follow source. If you want to do frame rate conversion, choose
24713// a frame rate from the dropdown list or choose Custom. The framerates shown
24714// in the dropdown list are decimal approximations of fractions. If you choose
24715// Custom, specify your frame rate as a fraction. If you are creating your transcoding
24716// job specification as a JSON file without the console, use FramerateControl
24717// to specify which value the service uses for the frame rate for this output.
24718// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
24719// from the input. Choose SPECIFIED if you want the service to use the frame
24720// rate you specify in the settings FramerateNumerator and FramerateDenominator.
24721const (
24722	// H265FramerateControlInitializeFromSource is a H265FramerateControl enum value
24723	H265FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
24724
24725	// H265FramerateControlSpecified is a H265FramerateControl enum value
24726	H265FramerateControlSpecified = "SPECIFIED"
24727)
24728
24729// H265FramerateControl_Values returns all elements of the H265FramerateControl enum
24730func H265FramerateControl_Values() []string {
24731	return []string{
24732		H265FramerateControlInitializeFromSource,
24733		H265FramerateControlSpecified,
24734	}
24735}
24736
24737// Choose the method that you want MediaConvert to use when increasing or decreasing
24738// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
24739// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
24740// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
24741// smooth picture, but might introduce undesirable video artifacts. For complex
24742// frame rate conversions, especially if your source video has already been
24743// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
24744// motion-compensated interpolation. FrameFormer chooses the best conversion
24745// method frame by frame. Note that using FrameFormer increases the transcoding
24746// time and incurs a significant add-on cost.
24747const (
24748	// H265FramerateConversionAlgorithmDuplicateDrop is a H265FramerateConversionAlgorithm enum value
24749	H265FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
24750
24751	// H265FramerateConversionAlgorithmInterpolate is a H265FramerateConversionAlgorithm enum value
24752	H265FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
24753
24754	// H265FramerateConversionAlgorithmFrameformer is a H265FramerateConversionAlgorithm enum value
24755	H265FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
24756)
24757
24758// H265FramerateConversionAlgorithm_Values returns all elements of the H265FramerateConversionAlgorithm enum
24759func H265FramerateConversionAlgorithm_Values() []string {
24760	return []string{
24761		H265FramerateConversionAlgorithmDuplicateDrop,
24762		H265FramerateConversionAlgorithmInterpolate,
24763		H265FramerateConversionAlgorithmFrameformer,
24764	}
24765}
24766
// If enabled, use reference B-frames for GOP structures that have more than
// 1 B-frame.
24769const (
24770	// H265GopBReferenceDisabled is a H265GopBReference enum value
24771	H265GopBReferenceDisabled = "DISABLED"
24772
24773	// H265GopBReferenceEnabled is a H265GopBReference enum value
24774	H265GopBReferenceEnabled = "ENABLED"
24775)
24776
24777// H265GopBReference_Values returns all elements of the H265GopBReference enum
24778func H265GopBReference_Values() []string {
24779	return []string{
24780		H265GopBReferenceDisabled,
24781		H265GopBReferenceEnabled,
24782	}
24783}
24784
// Indicates if the GOP Size in H265 is specified in frames or seconds. If seconds,
// the system will convert the GOP Size into a frame count at run time.
24787const (
24788	// H265GopSizeUnitsFrames is a H265GopSizeUnits enum value
24789	H265GopSizeUnitsFrames = "FRAMES"
24790
24791	// H265GopSizeUnitsSeconds is a H265GopSizeUnits enum value
24792	H265GopSizeUnitsSeconds = "SECONDS"
24793)
24794
24795// H265GopSizeUnits_Values returns all elements of the H265GopSizeUnits enum
24796func H265GopSizeUnits_Values() []string {
24797	return []string{
24798		H265GopSizeUnitsFrames,
24799		H265GopSizeUnitsSeconds,
24800	}
24801}
24802
24803// Choose the scan line type for the output. Keep the default value, Progressive
24804// (PROGRESSIVE) to create a progressive output, regardless of the scan type
24805// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
24806// to create an output that's interlaced with the same field polarity throughout.
24807// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
24808// to produce outputs with the same field polarity as the source. For jobs that
24809// have multiple inputs, the output field polarity might change over the course
24810// of the output. Follow behavior depends on the input scan type. If the source
24811// is interlaced, the output will be interlaced with the same polarity as the
// source. If the source is progressive, the output will be interlaced with
// top field first or bottom field first, depending on which of the Follow
// options you choose.
24815const (
24816	// H265InterlaceModeProgressive is a H265InterlaceMode enum value
24817	H265InterlaceModeProgressive = "PROGRESSIVE"
24818
24819	// H265InterlaceModeTopField is a H265InterlaceMode enum value
24820	H265InterlaceModeTopField = "TOP_FIELD"
24821
24822	// H265InterlaceModeBottomField is a H265InterlaceMode enum value
24823	H265InterlaceModeBottomField = "BOTTOM_FIELD"
24824
24825	// H265InterlaceModeFollowTopField is a H265InterlaceMode enum value
24826	H265InterlaceModeFollowTopField = "FOLLOW_TOP_FIELD"
24827
24828	// H265InterlaceModeFollowBottomField is a H265InterlaceMode enum value
24829	H265InterlaceModeFollowBottomField = "FOLLOW_BOTTOM_FIELD"
24830)
24831
24832// H265InterlaceMode_Values returns all elements of the H265InterlaceMode enum
24833func H265InterlaceMode_Values() []string {
24834	return []string{
24835		H265InterlaceModeProgressive,
24836		H265InterlaceModeTopField,
24837		H265InterlaceModeBottomField,
24838		H265InterlaceModeFollowTopField,
24839		H265InterlaceModeFollowBottomField,
24840	}
24841}
24842
24843// Optional. Specify how the service determines the pixel aspect ratio (PAR)
24844// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
24845// uses the PAR from your input video for your output. To specify a different
24846// PAR in the console, choose any value other than Follow source. To specify
24847// a different PAR by editing the JSON job specification, choose SPECIFIED.
24848// When you choose SPECIFIED for this setting, you must also specify values
24849// for the parNumerator and parDenominator settings.
24850const (
24851	// H265ParControlInitializeFromSource is a H265ParControl enum value
24852	H265ParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
24853
24854	// H265ParControlSpecified is a H265ParControl enum value
24855	H265ParControlSpecified = "SPECIFIED"
24856)
24857
24858// H265ParControl_Values returns all elements of the H265ParControl enum
24859func H265ParControl_Values() []string {
24860	return []string{
24861		H265ParControlInitializeFromSource,
24862		H265ParControlSpecified,
24863	}
24864}
24865
24866// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
24867// want to trade off encoding speed for output video quality. The default behavior
24868// is faster, lower quality, single-pass encoding.
24869const (
24870	// H265QualityTuningLevelSinglePass is a H265QualityTuningLevel enum value
24871	H265QualityTuningLevelSinglePass = "SINGLE_PASS"
24872
24873	// H265QualityTuningLevelSinglePassHq is a H265QualityTuningLevel enum value
24874	H265QualityTuningLevelSinglePassHq = "SINGLE_PASS_HQ"
24875
24876	// H265QualityTuningLevelMultiPassHq is a H265QualityTuningLevel enum value
24877	H265QualityTuningLevelMultiPassHq = "MULTI_PASS_HQ"
24878)
24879
24880// H265QualityTuningLevel_Values returns all elements of the H265QualityTuningLevel enum
24881func H265QualityTuningLevel_Values() []string {
24882	return []string{
24883		H265QualityTuningLevelSinglePass,
24884		H265QualityTuningLevelSinglePassHq,
24885		H265QualityTuningLevelMultiPassHq,
24886	}
24887}
24888
24889// Use this setting to specify whether this output has a variable bitrate (VBR),
24890// constant bitrate (CBR) or quality-defined variable bitrate (QVBR).
24891const (
24892	// H265RateControlModeVbr is a H265RateControlMode enum value
24893	H265RateControlModeVbr = "VBR"
24894
24895	// H265RateControlModeCbr is a H265RateControlMode enum value
24896	H265RateControlModeCbr = "CBR"
24897
24898	// H265RateControlModeQvbr is a H265RateControlMode enum value
24899	H265RateControlModeQvbr = "QVBR"
24900)
24901
24902// H265RateControlMode_Values returns all elements of the H265RateControlMode enum
24903func H265RateControlMode_Values() []string {
24904	return []string{
24905		H265RateControlModeVbr,
24906		H265RateControlModeCbr,
24907		H265RateControlModeQvbr,
24908	}
24909}
24910
// Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode
// dynamically selects the best strength based on the content.
24913const (
24914	// H265SampleAdaptiveOffsetFilterModeDefault is a H265SampleAdaptiveOffsetFilterMode enum value
24915	H265SampleAdaptiveOffsetFilterModeDefault = "DEFAULT"
24916
24917	// H265SampleAdaptiveOffsetFilterModeAdaptive is a H265SampleAdaptiveOffsetFilterMode enum value
24918	H265SampleAdaptiveOffsetFilterModeAdaptive = "ADAPTIVE"
24919
24920	// H265SampleAdaptiveOffsetFilterModeOff is a H265SampleAdaptiveOffsetFilterMode enum value
24921	H265SampleAdaptiveOffsetFilterModeOff = "OFF"
24922)
24923
24924// H265SampleAdaptiveOffsetFilterMode_Values returns all elements of the H265SampleAdaptiveOffsetFilterMode enum
24925func H265SampleAdaptiveOffsetFilterMode_Values() []string {
24926	return []string{
24927		H265SampleAdaptiveOffsetFilterModeDefault,
24928		H265SampleAdaptiveOffsetFilterModeAdaptive,
24929		H265SampleAdaptiveOffsetFilterModeOff,
24930	}
24931}
24932
24933// Use this setting for interlaced outputs, when your output frame rate is half
24934// of your input frame rate. In this situation, choose Optimized interlacing
24935// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
24936// case, each progressive frame from the input corresponds to an interlaced
24937// field in the output. Keep the default value, Basic interlacing (INTERLACED),
24938// for all other output frame rates. With basic interlacing, MediaConvert performs
24939// any frame rate conversion first and then interlaces the frames. When you
24940// choose Optimized interlacing and you set your output frame rate to a value
24941// that isn't suitable for optimized interlacing, MediaConvert automatically
24942// falls back to basic interlacing. Required settings: To use optimized interlacing,
24943// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
24944// use optimized interlacing for hard telecine outputs. You must also set Interlace
24945// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
24946const (
24947	// H265ScanTypeConversionModeInterlaced is a H265ScanTypeConversionMode enum value
24948	H265ScanTypeConversionModeInterlaced = "INTERLACED"
24949
24950	// H265ScanTypeConversionModeInterlacedOptimize is a H265ScanTypeConversionMode enum value
24951	H265ScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
24952)
24953
24954// H265ScanTypeConversionMode_Values returns all elements of the H265ScanTypeConversionMode enum
24955func H265ScanTypeConversionMode_Values() []string {
24956	return []string{
24957		H265ScanTypeConversionModeInterlaced,
24958		H265ScanTypeConversionModeInterlacedOptimize,
24959	}
24960}
24961
24962// Enable this setting to insert I-frames at scene changes that the service
24963// automatically detects. This improves video quality and is enabled by default.
24964// If this output uses QVBR, choose Transition detection (TRANSITION_DETECTION)
24965// for further video quality improvement. For more information about QVBR, see
24966// https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
24967const (
24968	// H265SceneChangeDetectDisabled is a H265SceneChangeDetect enum value
24969	H265SceneChangeDetectDisabled = "DISABLED"
24970
24971	// H265SceneChangeDetectEnabled is a H265SceneChangeDetect enum value
24972	H265SceneChangeDetectEnabled = "ENABLED"
24973
24974	// H265SceneChangeDetectTransitionDetection is a H265SceneChangeDetect enum value
24975	H265SceneChangeDetectTransitionDetection = "TRANSITION_DETECTION"
24976)
24977
24978// H265SceneChangeDetect_Values returns all elements of the H265SceneChangeDetect enum
24979func H265SceneChangeDetect_Values() []string {
24980	return []string{
24981		H265SceneChangeDetectDisabled,
24982		H265SceneChangeDetectEnabled,
24983		H265SceneChangeDetectTransitionDetection,
24984	}
24985}
24986
24987// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
24988// second (fps). Enable slow PAL to create a 25 fps output. When you enable
24989// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
24990// your audio to keep it synchronized with the video. Note that enabling this
24991// setting will slightly reduce the duration of your video. Required settings:
24992// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
24993// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
24994// 1.
24995const (
24996	// H265SlowPalDisabled is a H265SlowPal enum value
24997	H265SlowPalDisabled = "DISABLED"
24998
24999	// H265SlowPalEnabled is a H265SlowPal enum value
25000	H265SlowPalEnabled = "ENABLED"
25001)
25002
25003// H265SlowPal_Values returns all elements of the H265SlowPal enum
25004func H265SlowPal_Values() []string {
25005	return []string{
25006		H265SlowPalDisabled,
25007		H265SlowPalEnabled,
25008	}
25009}
25010
25011// Keep the default value, Enabled (ENABLED), to adjust quantization within
25012// each frame based on spatial variation of content complexity. When you enable
25013// this feature, the encoder uses fewer bits on areas that can sustain more
25014// distortion with no noticeable visual degradation and uses more bits on areas
25015// where any small distortion will be noticeable. For example, complex textured
25016// blocks are encoded with fewer bits and smooth textured blocks are encoded
25017// with more bits. Enabling this feature will almost always improve your video
25018// quality. Note, though, that this feature doesn't take into account where
25019// the viewer's attention is likely to be. If viewers are likely to be focusing
25020// their attention on a part of the screen with a lot of complex texture, you
25021// might choose to disable this feature. Related setting: When you enable spatial
25022// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
25023// depending on your content. For homogeneous content, such as cartoons and
25024// video games, set it to Low. For content with a wider variety of textures,
25025// set it to High or Higher.
25026const (
25027	// H265SpatialAdaptiveQuantizationDisabled is a H265SpatialAdaptiveQuantization enum value
25028	H265SpatialAdaptiveQuantizationDisabled = "DISABLED"
25029
25030	// H265SpatialAdaptiveQuantizationEnabled is a H265SpatialAdaptiveQuantization enum value
25031	H265SpatialAdaptiveQuantizationEnabled = "ENABLED"
25032)
25033
25034// H265SpatialAdaptiveQuantization_Values returns all elements of the H265SpatialAdaptiveQuantization enum
25035func H265SpatialAdaptiveQuantization_Values() []string {
25036	return []string{
25037		H265SpatialAdaptiveQuantizationDisabled,
25038		H265SpatialAdaptiveQuantizationEnabled,
25039	}
25040}
25041
25042// This field applies only if the Streams > Advanced > Framerate (framerate)
25043// field is set to 29.970. This field works with the Streams > Advanced > Preprocessors
25044// > Deinterlacer field (deinterlace_mode) and the Streams > Advanced > Interlaced
25045// Mode field (interlace_mode) to identify the scan type for the output: Progressive,
25046// Interlaced, Hard Telecine or Soft Telecine. - Hard: produces 29.97i output
25047// from 23.976 input. - Soft: produces 23.976; the player converts this output
25048// to 29.97i.
25049const (
25050	// H265TelecineNone is a H265Telecine enum value
25051	H265TelecineNone = "NONE"
25052
25053	// H265TelecineSoft is a H265Telecine enum value
25054	H265TelecineSoft = "SOFT"
25055
25056	// H265TelecineHard is a H265Telecine enum value
25057	H265TelecineHard = "HARD"
25058)
25059
25060// H265Telecine_Values returns all elements of the H265Telecine enum
25061func H265Telecine_Values() []string {
25062	return []string{
25063		H265TelecineNone,
25064		H265TelecineSoft,
25065		H265TelecineHard,
25066	}
25067}
25068
25069// Keep the default value, Enabled (ENABLED), to adjust quantization within
25070// each frame based on temporal variation of content complexity. When you enable
25071// this feature, the encoder uses fewer bits on areas of the frame that aren't
25072// moving and uses more bits on complex objects with sharp edges that move a
25073// lot. For example, this feature improves the readability of text tickers on
25074// newscasts and scoreboards on sports matches. Enabling this feature will almost
25075// always improve your video quality. Note, though, that this feature doesn't
25076// take into account where the viewer's attention is likely to be. If viewers
25077// are likely to be focusing their attention on a part of the screen that doesn't
25078// have moving objects with sharp edges, such as sports athletes' faces, you
25079// might choose to disable this feature. Related setting: When you enable temporal
25080// quantization, adjust the strength of the filter with the setting Adaptive
25081// quantization (adaptiveQuantization).
25082const (
25083	// H265TemporalAdaptiveQuantizationDisabled is a H265TemporalAdaptiveQuantization enum value
25084	H265TemporalAdaptiveQuantizationDisabled = "DISABLED"
25085
25086	// H265TemporalAdaptiveQuantizationEnabled is a H265TemporalAdaptiveQuantization enum value
25087	H265TemporalAdaptiveQuantizationEnabled = "ENABLED"
25088)
25089
25090// H265TemporalAdaptiveQuantization_Values returns all elements of the H265TemporalAdaptiveQuantization enum
25091func H265TemporalAdaptiveQuantization_Values() []string {
25092	return []string{
25093		H265TemporalAdaptiveQuantizationDisabled,
25094		H265TemporalAdaptiveQuantizationEnabled,
25095	}
25096}
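
// The two adaptive quantization enums above work together with the overall
// Adaptive quantization (adaptiveQuantization) strength described in their
// doc comments. The snippet below is a minimal, illustrative sketch rather
// than generated code; it assumes the SpatialAdaptiveQuantization,
// TemporalAdaptiveQuantization, and AdaptiveQuantization fields of the
// H265Settings struct, and the H265AdaptiveQuantizationHigh constant,
// defined elsewhere in this package.
//
//    // Enable both AQ modes and pick a strength suited to varied textures.
//    h265 := &mediaconvert.H265Settings{
//        SpatialAdaptiveQuantization:  aws.String(mediaconvert.H265SpatialAdaptiveQuantizationEnabled),
//        TemporalAdaptiveQuantization: aws.String(mediaconvert.H265TemporalAdaptiveQuantizationEnabled),
//        AdaptiveQuantization:         aws.String(mediaconvert.H265AdaptiveQuantizationHigh),
//    }
//    _ = h265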
25097
25098// Enables temporal layer identifiers in the encoded bitstream. Up to 3 layers
25099// are supported depending on GOP structure: I- and P-frames form one layer,
25100// reference B-frames can form a second layer and non-reference B-frames can
25101// form a third layer. Decoders can optionally decode only the lower temporal
25102// layers to generate a lower frame rate output. For example, given a bitstream
25103// with temporal IDs and with b-frames = 1 (i.e. IbPbPb display order), a decoder
25104// could decode all the frames for full frame rate output or only the I and
25105// P frames (lowest temporal layer) for a half frame rate output.
25106const (
25107	// H265TemporalIdsDisabled is a H265TemporalIds enum value
25108	H265TemporalIdsDisabled = "DISABLED"
25109
25110	// H265TemporalIdsEnabled is a H265TemporalIds enum value
25111	H265TemporalIdsEnabled = "ENABLED"
25112)
25113
25114// H265TemporalIds_Values returns all elements of the H265TemporalIds enum
25115func H265TemporalIds_Values() []string {
25116	return []string{
25117		H265TemporalIdsDisabled,
25118		H265TemporalIdsEnabled,
25119	}
25120}
25121
25122// Enable use of tiles, allowing horizontal as well as vertical subdivision
25123// of the encoded pictures.
25124const (
25125	// H265TilesDisabled is a H265Tiles enum value
25126	H265TilesDisabled = "DISABLED"
25127
25128	// H265TilesEnabled is a H265Tiles enum value
25129	H265TilesEnabled = "ENABLED"
25130)
25131
25132// H265Tiles_Values returns all elements of the H265Tiles enum
25133func H265Tiles_Values() []string {
25134	return []string{
25135		H265TilesDisabled,
25136		H265TilesEnabled,
25137	}
25138}
25139
25140// Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
25141const (
25142	// H265UnregisteredSeiTimecodeDisabled is a H265UnregisteredSeiTimecode enum value
25143	H265UnregisteredSeiTimecodeDisabled = "DISABLED"
25144
25145	// H265UnregisteredSeiTimecodeEnabled is a H265UnregisteredSeiTimecode enum value
25146	H265UnregisteredSeiTimecodeEnabled = "ENABLED"
25147)
25148
25149// H265UnregisteredSeiTimecode_Values returns all elements of the H265UnregisteredSeiTimecode enum
25150func H265UnregisteredSeiTimecode_Values() []string {
25151	return []string{
25152		H265UnregisteredSeiTimecodeDisabled,
25153		H265UnregisteredSeiTimecodeEnabled,
25154	}
25155}
25156
25157// If the location of parameter set NAL units doesn't matter in your workflow,
25158// ignore this setting. Use this setting only with CMAF or DASH outputs, or
25159// with standalone file outputs in an MPEG-4 container (MP4 outputs). Choose
25160// HVC1 to mark your output as HVC1. This makes your output compliant with the
25161// following specification: ISO/IEC JTC1 SC29 N13798 Text ISO/IEC FDIS 14496-15
25162// 3rd Edition. For these outputs, the service stores parameter set NAL units
25163// in the sample headers but not in the samples directly. For MP4 outputs, when
25164// you choose HVC1, your output video might not work properly with some downstream
25165// systems and video players. The service defaults to marking your output as
25166// HEV1. For these outputs, the service writes parameter set NAL units directly
25167// into the samples.
25168const (
25169	// H265WriteMp4PackagingTypeHvc1 is a H265WriteMp4PackagingType enum value
25170	H265WriteMp4PackagingTypeHvc1 = "HVC1"
25171
25172	// H265WriteMp4PackagingTypeHev1 is a H265WriteMp4PackagingType enum value
25173	H265WriteMp4PackagingTypeHev1 = "HEV1"
25174)
25175
25176// H265WriteMp4PackagingType_Values returns all elements of the H265WriteMp4PackagingType enum
25177func H265WriteMp4PackagingType_Values() []string {
25178	return []string{
25179		H265WriteMp4PackagingTypeHvc1,
25180		H265WriteMp4PackagingTypeHev1,
25181	}
25182}
25183
25184const (
25185	// HlsAdMarkersElemental is a HlsAdMarkers enum value
25186	HlsAdMarkersElemental = "ELEMENTAL"
25187
25188	// HlsAdMarkersElementalScte35 is a HlsAdMarkers enum value
25189	HlsAdMarkersElementalScte35 = "ELEMENTAL_SCTE35"
25190)
25191
25192// HlsAdMarkers_Values returns all elements of the HlsAdMarkers enum
25193func HlsAdMarkers_Values() []string {
25194	return []string{
25195		HlsAdMarkersElemental,
25196		HlsAdMarkersElementalScte35,
25197	}
25198}
25199
25200// Use this setting only in audio-only outputs. Choose MPEG-2 Transport Stream
25201// (M2TS) to create a file in an MPEG2-TS container. Keep the default value
25202// Automatic (AUTOMATIC) to create a raw audio-only file with no container.
25203// Regardless of the value that you specify here, if this output has video,
25204// the service will place outputs into an MPEG2-TS container.
25205const (
25206	// HlsAudioOnlyContainerAutomatic is a HlsAudioOnlyContainer enum value
25207	HlsAudioOnlyContainerAutomatic = "AUTOMATIC"
25208
25209	// HlsAudioOnlyContainerM2ts is a HlsAudioOnlyContainer enum value
25210	HlsAudioOnlyContainerM2ts = "M2TS"
25211)
25212
25213// HlsAudioOnlyContainer_Values returns all elements of the HlsAudioOnlyContainer enum
25214func HlsAudioOnlyContainer_Values() []string {
25215	return []string{
25216		HlsAudioOnlyContainerAutomatic,
25217		HlsAudioOnlyContainerM2ts,
25218	}
25219}
25220
25221// Ignore this setting unless you are using FairPlay DRM with Verimatrix and
25222// you encounter playback issues. Keep the default value, Include (INCLUDE),
25223// to output audio-only headers. Choose Exclude (EXCLUDE) to remove the audio-only
25224// headers from your audio segments.
25225const (
25226	// HlsAudioOnlyHeaderInclude is a HlsAudioOnlyHeader enum value
25227	HlsAudioOnlyHeaderInclude = "INCLUDE"
25228
25229	// HlsAudioOnlyHeaderExclude is a HlsAudioOnlyHeader enum value
25230	HlsAudioOnlyHeaderExclude = "EXCLUDE"
25231)
25232
25233// HlsAudioOnlyHeader_Values returns all elements of the HlsAudioOnlyHeader enum
25234func HlsAudioOnlyHeader_Values() []string {
25235	return []string{
25236		HlsAudioOnlyHeaderInclude,
25237		HlsAudioOnlyHeaderExclude,
25238	}
25239}
25240
25241// Four types of audio-only tracks are supported: Audio-Only Variant Stream
25242// The client can play back this audio-only stream instead of video in low-bandwidth
25243// scenarios. Represented as an EXT-X-STREAM-INF in the HLS manifest. Alternate
25244// Audio, Auto Select, Default Alternate rendition that the client should try
25245// to play back by default. Represented as an EXT-X-MEDIA in the HLS manifest
25246// with DEFAULT=YES, AUTOSELECT=YES Alternate Audio, Auto Select, Not Default
25247// Alternate rendition that the client may try to play back by default. Represented
25248// as an EXT-X-MEDIA in the HLS manifest with DEFAULT=NO, AUTOSELECT=YES Alternate
25249// Audio, not Auto Select Alternate rendition that the client will not try to
25250// play back by default. Represented as an EXT-X-MEDIA in the HLS manifest with
25251// DEFAULT=NO, AUTOSELECT=NO
25252const (
25253	// HlsAudioTrackTypeAlternateAudioAutoSelectDefault is a HlsAudioTrackType enum value
25254	HlsAudioTrackTypeAlternateAudioAutoSelectDefault = "ALTERNATE_AUDIO_AUTO_SELECT_DEFAULT"
25255
25256	// HlsAudioTrackTypeAlternateAudioAutoSelect is a HlsAudioTrackType enum value
25257	HlsAudioTrackTypeAlternateAudioAutoSelect = "ALTERNATE_AUDIO_AUTO_SELECT"
25258
25259	// HlsAudioTrackTypeAlternateAudioNotAutoSelect is a HlsAudioTrackType enum value
25260	HlsAudioTrackTypeAlternateAudioNotAutoSelect = "ALTERNATE_AUDIO_NOT_AUTO_SELECT"
25261
25262	// HlsAudioTrackTypeAudioOnlyVariantStream is a HlsAudioTrackType enum value
25263	HlsAudioTrackTypeAudioOnlyVariantStream = "AUDIO_ONLY_VARIANT_STREAM"
25264)
25265
25266// HlsAudioTrackType_Values returns all elements of the HlsAudioTrackType enum
25267func HlsAudioTrackType_Values() []string {
25268	return []string{
25269		HlsAudioTrackTypeAlternateAudioAutoSelectDefault,
25270		HlsAudioTrackTypeAlternateAudioAutoSelect,
25271		HlsAudioTrackTypeAlternateAudioNotAutoSelect,
25272		HlsAudioTrackTypeAudioOnlyVariantStream,
25273	}
25274}
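
// The four track types above correspond to different manifest entries, so a
// common pattern is to set them per output through HlsSettings. A hedged
// sketch follows (not generated code); the AudioGroupId and AudioTrackType
// field names are assumed from the HlsSettings struct in this package.
//
//    // Mark an audio-only output as the default, auto-selected rendition.
//    hlsOut := &mediaconvert.HlsSettings{
//        AudioGroupId:   aws.String("program_audio"),
//        AudioTrackType: aws.String(mediaconvert.HlsAudioTrackTypeAlternateAudioAutoSelectDefault),
//    }
//    _ = hlsOut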
25275
25276// Applies only to 608 Embedded output captions. Insert: Include CLOSED-CAPTIONS
25277// lines in the manifest. Specify at least one language in the CC1 Language
25278// Code field. One CLOSED-CAPTION line is added for each Language Code you specify.
25279// Make sure to specify the languages in the order in which they appear in the
25280// original source (if the source is embedded format) or the order of the caption
25281// selectors (if the source is other than embedded). Otherwise, languages in
25282// the manifest will not match up properly with the output captions. None: Include
25283// CLOSED-CAPTIONS=NONE line in the manifest. Omit: Omit any CLOSED-CAPTIONS
25284// line from the manifest.
25285const (
25286	// HlsCaptionLanguageSettingInsert is a HlsCaptionLanguageSetting enum value
25287	HlsCaptionLanguageSettingInsert = "INSERT"
25288
25289	// HlsCaptionLanguageSettingOmit is a HlsCaptionLanguageSetting enum value
25290	HlsCaptionLanguageSettingOmit = "OMIT"
25291
25292	// HlsCaptionLanguageSettingNone is a HlsCaptionLanguageSetting enum value
25293	HlsCaptionLanguageSettingNone = "NONE"
25294)
25295
25296// HlsCaptionLanguageSetting_Values returns all elements of the HlsCaptionLanguageSetting enum
25297func HlsCaptionLanguageSetting_Values() []string {
25298	return []string{
25299		HlsCaptionLanguageSettingInsert,
25300		HlsCaptionLanguageSettingOmit,
25301		HlsCaptionLanguageSettingNone,
25302	}
25303}
25304
25305// Disable this setting only when your workflow requires the #EXT-X-ALLOW-CACHE:no
25306// tag. Otherwise, keep the default value Enabled (ENABLED) and control caching
25307// in your video distribution setup. For example, use the Cache-Control HTTP
25308// header.
25309const (
25310	// HlsClientCacheDisabled is a HlsClientCache enum value
25311	HlsClientCacheDisabled = "DISABLED"
25312
25313	// HlsClientCacheEnabled is a HlsClientCache enum value
25314	HlsClientCacheEnabled = "ENABLED"
25315)
25316
25317// HlsClientCache_Values returns all elements of the HlsClientCache enum
25318func HlsClientCache_Values() []string {
25319	return []string{
25320		HlsClientCacheDisabled,
25321		HlsClientCacheEnabled,
25322	}
25323}
25324
25325// Specification to use (RFC-6381 or the default RFC-4281) during m3u8 playlist
25326// generation.
25327const (
25328	// HlsCodecSpecificationRfc6381 is a HlsCodecSpecification enum value
25329	HlsCodecSpecificationRfc6381 = "RFC_6381"
25330
25331	// HlsCodecSpecificationRfc4281 is a HlsCodecSpecification enum value
25332	HlsCodecSpecificationRfc4281 = "RFC_4281"
25333)
25334
25335// HlsCodecSpecification_Values returns all elements of the HlsCodecSpecification enum
25336func HlsCodecSpecification_Values() []string {
25337	return []string{
25338		HlsCodecSpecificationRfc6381,
25339		HlsCodecSpecificationRfc4281,
25340	}
25341}
25342
25343// Indicates whether segments should be placed in subdirectories.
25344const (
25345	// HlsDirectoryStructureSingleDirectory is a HlsDirectoryStructure enum value
25346	HlsDirectoryStructureSingleDirectory = "SINGLE_DIRECTORY"
25347
25348	// HlsDirectoryStructureSubdirectoryPerStream is a HlsDirectoryStructure enum value
25349	HlsDirectoryStructureSubdirectoryPerStream = "SUBDIRECTORY_PER_STREAM"
25350)
25351
25352// HlsDirectoryStructure_Values returns all elements of the HlsDirectoryStructure enum
25353func HlsDirectoryStructure_Values() []string {
25354	return []string{
25355		HlsDirectoryStructureSingleDirectory,
25356		HlsDirectoryStructureSubdirectoryPerStream,
25357	}
25358}
25359
25360// Encrypts the segments with the given encryption scheme. Leave blank to disable.
25361// Selecting 'Disabled' in the web interface also disables encryption.
25362const (
25363	// HlsEncryptionTypeAes128 is a HlsEncryptionType enum value
25364	HlsEncryptionTypeAes128 = "AES128"
25365
25366	// HlsEncryptionTypeSampleAes is a HlsEncryptionType enum value
25367	HlsEncryptionTypeSampleAes = "SAMPLE_AES"
25368)
25369
25370// HlsEncryptionType_Values returns all elements of the HlsEncryptionType enum
25371func HlsEncryptionType_Values() []string {
25372	return []string{
25373		HlsEncryptionTypeAes128,
25374		HlsEncryptionTypeSampleAes,
25375	}
25376}
25377
25378// Choose Include (INCLUDE) to have MediaConvert generate a child manifest that
25379// lists only the I-frames for this rendition, in addition to your regular manifest
25380// for this rendition. You might use this manifest as part of a workflow that
25381// creates preview functions for your video. MediaConvert adds both the I-frame
25382// only child manifest and the regular child manifest to the parent manifest.
25383// When you don't need the I-frame only child manifest, keep the default value
25384// Exclude (EXCLUDE).
25385const (
25386	// HlsIFrameOnlyManifestInclude is a HlsIFrameOnlyManifest enum value
25387	HlsIFrameOnlyManifestInclude = "INCLUDE"
25388
25389	// HlsIFrameOnlyManifestExclude is a HlsIFrameOnlyManifest enum value
25390	HlsIFrameOnlyManifestExclude = "EXCLUDE"
25391)
25392
25393// HlsIFrameOnlyManifest_Values returns all elements of the HlsIFrameOnlyManifest enum
25394func HlsIFrameOnlyManifest_Values() []string {
25395	return []string{
25396		HlsIFrameOnlyManifestInclude,
25397		HlsIFrameOnlyManifestExclude,
25398	}
25399}
25400
25401// The Initialization Vector is a 128-bit number used in conjunction with the
25402// key for encrypting blocks. If set to INCLUDE, the Initialization Vector is listed
25403// in the manifest. Otherwise, the Initialization Vector is not in the manifest.
25404const (
25405	// HlsInitializationVectorInManifestInclude is a HlsInitializationVectorInManifest enum value
25406	HlsInitializationVectorInManifestInclude = "INCLUDE"
25407
25408	// HlsInitializationVectorInManifestExclude is a HlsInitializationVectorInManifest enum value
25409	HlsInitializationVectorInManifestExclude = "EXCLUDE"
25410)
25411
25412// HlsInitializationVectorInManifest_Values returns all elements of the HlsInitializationVectorInManifest enum
25413func HlsInitializationVectorInManifest_Values() []string {
25414	return []string{
25415		HlsInitializationVectorInManifestInclude,
25416		HlsInitializationVectorInManifestExclude,
25417	}
25418}
25419
25420// Specify whether your DRM encryption key is static or from a key provider
25421// that follows the SPEKE standard. For more information about SPEKE, see https://docs.aws.amazon.com/speke/latest/documentation/what-is-speke.html.
25422const (
25423	// HlsKeyProviderTypeSpeke is a HlsKeyProviderType enum value
25424	HlsKeyProviderTypeSpeke = "SPEKE"
25425
25426	// HlsKeyProviderTypeStaticKey is a HlsKeyProviderType enum value
25427	HlsKeyProviderTypeStaticKey = "STATIC_KEY"
25428)
25429
25430// HlsKeyProviderType_Values returns all elements of the HlsKeyProviderType enum
25431func HlsKeyProviderType_Values() []string {
25432	return []string{
25433		HlsKeyProviderTypeSpeke,
25434		HlsKeyProviderTypeStaticKey,
25435	}
25436}
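
// The encryption-related enums above (HlsEncryptionType,
// HlsInitializationVectorInManifest, HlsKeyProviderType) are all applied
// through a single HlsEncryptionSettings object. The sketch below is
// illustrative only; the EncryptionMethod, Type, InitializationVectorInManifest,
// and StaticKeyProvider field names are assumptions about this package, and
// the key value and URL are placeholders.
//
//    enc := &mediaconvert.HlsEncryptionSettings{
//        EncryptionMethod:               aws.String(mediaconvert.HlsEncryptionTypeAes128),
//        Type:                           aws.String(mediaconvert.HlsKeyProviderTypeStaticKey),
//        InitializationVectorInManifest: aws.String(mediaconvert.HlsInitializationVectorInManifestInclude),
//        StaticKeyProvider: &mediaconvert.StaticKeyProvider{
//            StaticKeyValue: aws.String("0123456789abcdef0123456789abcdef"),
//            Url:            aws.String("https://example.com/hls.key"),
//        },
//    }
//    _ = enc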
25437
25438// When set to GZIP, compresses the HLS playlist.
25439const (
25440	// HlsManifestCompressionGzip is a HlsManifestCompression enum value
25441	HlsManifestCompressionGzip = "GZIP"
25442
25443	// HlsManifestCompressionNone is a HlsManifestCompression enum value
25444	HlsManifestCompressionNone = "NONE"
25445)
25446
25447// HlsManifestCompression_Values returns all elements of the HlsManifestCompression enum
25448func HlsManifestCompression_Values() []string {
25449	return []string{
25450		HlsManifestCompressionGzip,
25451		HlsManifestCompressionNone,
25452	}
25453}
25454
25455// Indicates whether the output manifest should use floating point values for
25456// segment duration.
25457const (
25458	// HlsManifestDurationFormatFloatingPoint is a HlsManifestDurationFormat enum value
25459	HlsManifestDurationFormatFloatingPoint = "FLOATING_POINT"
25460
25461	// HlsManifestDurationFormatInteger is a HlsManifestDurationFormat enum value
25462	HlsManifestDurationFormatInteger = "INTEGER"
25463)
25464
25465// HlsManifestDurationFormat_Values returns all elements of the HlsManifestDurationFormat enum
25466func HlsManifestDurationFormat_Values() []string {
25467	return []string{
25468		HlsManifestDurationFormatFloatingPoint,
25469		HlsManifestDurationFormatInteger,
25470	}
25471}
25472
25473// Enable this setting to insert the EXT-X-SESSION-KEY element into the master
25474// playlist. This allows for offline Apple HLS FairPlay content protection.
25475const (
25476	// HlsOfflineEncryptedEnabled is a HlsOfflineEncrypted enum value
25477	HlsOfflineEncryptedEnabled = "ENABLED"
25478
25479	// HlsOfflineEncryptedDisabled is a HlsOfflineEncrypted enum value
25480	HlsOfflineEncryptedDisabled = "DISABLED"
25481)
25482
25483// HlsOfflineEncrypted_Values returns all elements of the HlsOfflineEncrypted enum
25484func HlsOfflineEncrypted_Values() []string {
25485	return []string{
25486		HlsOfflineEncryptedEnabled,
25487		HlsOfflineEncryptedDisabled,
25488	}
25489}
25490
25491// Indicates whether the .m3u8 manifest file should be generated for this HLS
25492// output group.
25493const (
25494	// HlsOutputSelectionManifestsAndSegments is a HlsOutputSelection enum value
25495	HlsOutputSelectionManifestsAndSegments = "MANIFESTS_AND_SEGMENTS"
25496
25497	// HlsOutputSelectionSegmentsOnly is a HlsOutputSelection enum value
25498	HlsOutputSelectionSegmentsOnly = "SEGMENTS_ONLY"
25499)
25500
25501// HlsOutputSelection_Values returns all elements of the HlsOutputSelection enum
25502func HlsOutputSelection_Values() []string {
25503	return []string{
25504		HlsOutputSelectionManifestsAndSegments,
25505		HlsOutputSelectionSegmentsOnly,
25506	}
25507}
25508
25509// Includes or excludes EXT-X-PROGRAM-DATE-TIME tag in .m3u8 manifest files.
25510// The value is calculated as follows: either the program date and time are
25511// initialized using the input timecode source, or the time is initialized using
25512// the input timecode source and the date is initialized using the timestamp_offset.
25513const (
25514	// HlsProgramDateTimeInclude is a HlsProgramDateTime enum value
25515	HlsProgramDateTimeInclude = "INCLUDE"
25516
25517	// HlsProgramDateTimeExclude is a HlsProgramDateTime enum value
25518	HlsProgramDateTimeExclude = "EXCLUDE"
25519)
25520
25521// HlsProgramDateTime_Values returns all elements of the HlsProgramDateTime enum
25522func HlsProgramDateTime_Values() []string {
25523	return []string{
25524		HlsProgramDateTimeInclude,
25525		HlsProgramDateTimeExclude,
25526	}
25527}
25528
25529// When set to SINGLE_FILE, emits the program as a single media resource (.ts)
25530// file and uses #EXT-X-BYTERANGE tags to index segments for playback.
25531const (
25532	// HlsSegmentControlSingleFile is a HlsSegmentControl enum value
25533	HlsSegmentControlSingleFile = "SINGLE_FILE"
25534
25535	// HlsSegmentControlSegmentedFiles is a HlsSegmentControl enum value
25536	HlsSegmentControlSegmentedFiles = "SEGMENTED_FILES"
25537)
25538
25539// HlsSegmentControl_Values returns all elements of the HlsSegmentControl enum
25540func HlsSegmentControl_Values() []string {
25541	return []string{
25542		HlsSegmentControlSingleFile,
25543		HlsSegmentControlSegmentedFiles,
25544	}
25545}
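
// Group-level HLS enums such as HlsSegmentControl, HlsManifestDurationFormat,
// and HlsProgramDateTime are applied through HlsGroupSettings. A hedged
// sketch follows (not generated code); the field names and the destination
// bucket are assumptions used only for illustration.
//
//    group := &mediaconvert.HlsGroupSettings{
//        Destination:            aws.String("s3://example-bucket/hls/"),
//        SegmentControl:         aws.String(mediaconvert.HlsSegmentControlSegmentedFiles),
//        SegmentLength:          aws.Int64(6),
//        ManifestDurationFormat: aws.String(mediaconvert.HlsManifestDurationFormatFloatingPoint),
//        ProgramDateTime:        aws.String(mediaconvert.HlsProgramDateTimeInclude),
//    }
//    _ = group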
25546
25547// Include or exclude RESOLUTION attribute for video in EXT-X-STREAM-INF tag
25548// of variant manifest.
25549const (
25550	// HlsStreamInfResolutionInclude is a HlsStreamInfResolution enum value
25551	HlsStreamInfResolutionInclude = "INCLUDE"
25552
25553	// HlsStreamInfResolutionExclude is a HlsStreamInfResolution enum value
25554	HlsStreamInfResolutionExclude = "EXCLUDE"
25555)
25556
25557// HlsStreamInfResolution_Values returns all elements of the HlsStreamInfResolution enum
25558func HlsStreamInfResolution_Values() []string {
25559	return []string{
25560		HlsStreamInfResolutionInclude,
25561		HlsStreamInfResolutionExclude,
25562	}
25563}
25564
25565// Indicates the ID3 frame that has the timecode.
25566const (
25567	// HlsTimedMetadataId3FrameNone is a HlsTimedMetadataId3Frame enum value
25568	HlsTimedMetadataId3FrameNone = "NONE"
25569
25570	// HlsTimedMetadataId3FramePriv is a HlsTimedMetadataId3Frame enum value
25571	HlsTimedMetadataId3FramePriv = "PRIV"
25572
25573	// HlsTimedMetadataId3FrameTdrl is a HlsTimedMetadataId3Frame enum value
25574	HlsTimedMetadataId3FrameTdrl = "TDRL"
25575)
25576
25577// HlsTimedMetadataId3Frame_Values returns all elements of the HlsTimedMetadataId3Frame enum
25578func HlsTimedMetadataId3Frame_Values() []string {
25579	return []string{
25580		HlsTimedMetadataId3FrameNone,
25581		HlsTimedMetadataId3FramePriv,
25582		HlsTimedMetadataId3FrameTdrl,
25583	}
25584}
25585
25586// Keep this setting enabled to have MediaConvert use the font style and position
25587// information from the captions source in the output. This option is available
25588// only when your input captions are IMSC, SMPTE-TT, or TTML. Disable this setting
25589// for simplified output captions.
25590const (
25591	// ImscStylePassthroughEnabled is a ImscStylePassthrough enum value
25592	ImscStylePassthroughEnabled = "ENABLED"
25593
25594	// ImscStylePassthroughDisabled is a ImscStylePassthrough enum value
25595	ImscStylePassthroughDisabled = "DISABLED"
25596)
25597
25598// ImscStylePassthrough_Values returns all elements of the ImscStylePassthrough enum
25599func ImscStylePassthrough_Values() []string {
25600	return []string{
25601		ImscStylePassthroughEnabled,
25602		ImscStylePassthroughDisabled,
25603	}
25604}
25605
25606// Enable Deblock (InputDeblockFilter) to produce smoother motion in the output.
25607// Default is disabled. Only manually controllable for MPEG2 and uncompressed
25608// video inputs.
25609const (
25610	// InputDeblockFilterEnabled is a InputDeblockFilter enum value
25611	InputDeblockFilterEnabled = "ENABLED"
25612
25613	// InputDeblockFilterDisabled is a InputDeblockFilter enum value
25614	InputDeblockFilterDisabled = "DISABLED"
25615)
25616
25617// InputDeblockFilter_Values returns all elements of the InputDeblockFilter enum
25618func InputDeblockFilter_Values() []string {
25619	return []string{
25620		InputDeblockFilterEnabled,
25621		InputDeblockFilterDisabled,
25622	}
25623}
25624
25625// Enable Denoise (InputDenoiseFilter) to filter noise from the input. Default
25626// is disabled. Only applicable to MPEG2, H.264, H.265, and uncompressed video
25627// inputs.
25628const (
25629	// InputDenoiseFilterEnabled is a InputDenoiseFilter enum value
25630	InputDenoiseFilterEnabled = "ENABLED"
25631
25632	// InputDenoiseFilterDisabled is a InputDenoiseFilter enum value
25633	InputDenoiseFilterDisabled = "DISABLED"
25634)
25635
25636// InputDenoiseFilter_Values returns all elements of the InputDenoiseFilter enum
25637func InputDenoiseFilter_Values() []string {
25638	return []string{
25639		InputDenoiseFilterEnabled,
25640		InputDenoiseFilterDisabled,
25641	}
25642}
25643
25644// Specify how the transcoding service applies the denoise and deblock filters.
25645// You must also enable the filters separately, with Denoise (InputDenoiseFilter)
25646// and Deblock (InputDeblockFilter). * Auto - The transcoding service determines
25647// whether to apply filtering, depending on input type and quality. * Disable
25648// - The input is not filtered. This is true even if you use the API to enable
25649// them in (InputDenoiseFilter) and (InputDeblockFilter). * Force - The input
25650// is filtered regardless of input type.
25651const (
25652	// InputFilterEnableAuto is a InputFilterEnable enum value
25653	InputFilterEnableAuto = "AUTO"
25654
25655	// InputFilterEnableDisable is a InputFilterEnable enum value
25656	InputFilterEnableDisable = "DISABLE"
25657
25658	// InputFilterEnableForce is a InputFilterEnable enum value
25659	InputFilterEnableForce = "FORCE"
25660)
25661
25662// InputFilterEnable_Values returns all elements of the InputFilterEnable enum
25663func InputFilterEnable_Values() []string {
25664	return []string{
25665		InputFilterEnableAuto,
25666		InputFilterEnableDisable,
25667		InputFilterEnableForce,
25668	}
25669}
25670
25671// Set PSI control (InputPsiControl) for transport stream inputs to specify
25672// which data the demux process scans. * Ignore PSI - Scan all PIDs for audio
25673// and video. * Use PSI - Scan only PSI data.
25674const (
25675	// InputPsiControlIgnorePsi is a InputPsiControl enum value
25676	InputPsiControlIgnorePsi = "IGNORE_PSI"
25677
25678	// InputPsiControlUsePsi is a InputPsiControl enum value
25679	InputPsiControlUsePsi = "USE_PSI"
25680)
25681
25682// InputPsiControl_Values returns all elements of the InputPsiControl enum
25683func InputPsiControl_Values() []string {
25684	return []string{
25685		InputPsiControlIgnorePsi,
25686		InputPsiControlUsePsi,
25687	}
25688}
25689
25690// Use Rotate (InputRotate) to specify how the service rotates your video. You
25691// can choose automatic rotation or specify a rotation. You can specify a clockwise
25692// rotation of 0, 90, 180, or 270 degrees. If your input video container is
25693// .mov or .mp4 and your input has rotation metadata, you can choose Automatic
25694// to have the service rotate your video according to the rotation specified
25695// in the metadata. The rotation must be within one degree of 90, 180, or 270
25696// degrees. If the rotation metadata specifies any other rotation, the service
25697// will default to no rotation. By default, the service does no rotation, even
25698// if your input video has rotation metadata. The service doesn't pass through
25699// rotation metadata.
25700const (
25701	// InputRotateDegree0 is a InputRotate enum value
25702	InputRotateDegree0 = "DEGREE_0"
25703
25704	// InputRotateDegrees90 is a InputRotate enum value
25705	InputRotateDegrees90 = "DEGREES_90"
25706
25707	// InputRotateDegrees180 is a InputRotate enum value
25708	InputRotateDegrees180 = "DEGREES_180"
25709
25710	// InputRotateDegrees270 is a InputRotate enum value
25711	InputRotateDegrees270 = "DEGREES_270"
25712
25713	// InputRotateAuto is a InputRotate enum value
25714	InputRotateAuto = "AUTO"
25715)
25716
25717// InputRotate_Values returns all elements of the InputRotate enum
25718func InputRotate_Values() []string {
25719	return []string{
25720		InputRotateDegree0,
25721		InputRotateDegrees90,
25722		InputRotateDegrees180,
25723		InputRotateDegrees270,
25724		InputRotateAuto,
25725	}
25726}
25727
25728// When you have a progressive segmented frame (PsF) input, use this setting
25729// to flag the input as PsF. MediaConvert doesn't automatically detect PsF.
25730// Therefore, flagging your input as PsF results in better preservation of video
25731// quality when you do deinterlacing and frame rate conversion. If you don't
25732// specify, the default value is Auto (AUTO). Auto is the correct setting for
25733// all inputs that are not PsF. Don't set this value to PsF when your input
25734// is interlaced. Doing so creates horizontal interlacing artifacts.
25735const (
25736	// InputScanTypeAuto is a InputScanType enum value
25737	InputScanTypeAuto = "AUTO"
25738
25739	// InputScanTypePsf is a InputScanType enum value
25740	InputScanTypePsf = "PSF"
25741)
25742
25743// InputScanType_Values returns all elements of the InputScanType enum
25744func InputScanType_Values() []string {
25745	return []string{
25746		InputScanTypeAuto,
25747		InputScanTypePsf,
25748	}
25749}
25750
25751// Use this Timecode source setting, located under the input settings (InputTimecodeSource),
25752// to specify how the service counts input video frames. This input frame count
25753// affects only the behavior of features that apply to a single input at a time,
25754// such as input clipping and synchronizing some captions formats. Choose Embedded
25755// (EMBEDDED) to use the timecodes in your input video. Choose Start at zero
25756// (ZEROBASED) to start the first frame at zero. Choose Specified start (SPECIFIEDSTART)
25757// to start the first frame at the timecode that you specify in the setting
25758// Start timecode (timecodeStart). If you don't specify a value for Timecode
25759// source, the service will use Embedded by default. For more information about
25760// timecodes, see https://docs.aws.amazon.com/console/mediaconvert/timecode.
25761const (
25762	// InputTimecodeSourceEmbedded is a InputTimecodeSource enum value
25763	InputTimecodeSourceEmbedded = "EMBEDDED"
25764
25765	// InputTimecodeSourceZerobased is a InputTimecodeSource enum value
25766	InputTimecodeSourceZerobased = "ZEROBASED"
25767
25768	// InputTimecodeSourceSpecifiedstart is a InputTimecodeSource enum value
25769	InputTimecodeSourceSpecifiedstart = "SPECIFIEDSTART"
25770)
25771
25772// InputTimecodeSource_Values returns all elements of the InputTimecodeSource enum
25773func InputTimecodeSource_Values() []string {
25774	return []string{
25775		InputTimecodeSourceEmbedded,
25776		InputTimecodeSourceZerobased,
25777		InputTimecodeSourceSpecifiedstart,
25778	}
25779}
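
// The input-level enums above (InputDeblockFilter, InputDenoiseFilter,
// InputFilterEnable, InputPsiControl, InputRotate, InputScanType, and
// InputTimecodeSource) all attach to a single Input. A hedged sketch,
// assuming the corresponding field names of the Input struct in this package;
// the S3 path is a placeholder.
//
//    in := &mediaconvert.Input{
//        FileInput:      aws.String("s3://example-bucket/source.mov"),
//        FilterEnable:   aws.String(mediaconvert.InputFilterEnableAuto),
//        DeblockFilter:  aws.String(mediaconvert.InputDeblockFilterDisabled),
//        DenoiseFilter:  aws.String(mediaconvert.InputDenoiseFilterDisabled),
//        Rotate:         aws.String(mediaconvert.InputRotateAuto),
//        TimecodeSource: aws.String(mediaconvert.InputTimecodeSourceZerobased),
//    }
//    _ = in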
25780
25781// A job's phase can be PROBING, TRANSCODING, or UPLOADING.
25782const (
25783	// JobPhaseProbing is a JobPhase enum value
25784	JobPhaseProbing = "PROBING"
25785
25786	// JobPhaseTranscoding is a JobPhase enum value
25787	JobPhaseTranscoding = "TRANSCODING"
25788
25789	// JobPhaseUploading is a JobPhase enum value
25790	JobPhaseUploading = "UPLOADING"
25791)
25792
25793// JobPhase_Values returns all elements of the JobPhase enum
25794func JobPhase_Values() []string {
25795	return []string{
25796		JobPhaseProbing,
25797		JobPhaseTranscoding,
25798		JobPhaseUploading,
25799	}
25800}
25801
25802// A job's status can be SUBMITTED, PROGRESSING, COMPLETE, CANCELED, or ERROR.
25803const (
25804	// JobStatusSubmitted is a JobStatus enum value
25805	JobStatusSubmitted = "SUBMITTED"
25806
25807	// JobStatusProgressing is a JobStatus enum value
25808	JobStatusProgressing = "PROGRESSING"
25809
25810	// JobStatusComplete is a JobStatus enum value
25811	JobStatusComplete = "COMPLETE"
25812
25813	// JobStatusCanceled is a JobStatus enum value
25814	JobStatusCanceled = "CANCELED"
25815
25816	// JobStatusError is a JobStatus enum value
25817	JobStatusError = "ERROR"
25818)
25819
25820// JobStatus_Values returns all elements of the JobStatus enum
25821func JobStatus_Values() []string {
25822	return []string{
25823		JobStatusSubmitted,
25824		JobStatusProgressing,
25825		JobStatusComplete,
25826		JobStatusCanceled,
25827		JobStatusError,
25828	}
25829}
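
// Because SUBMITTED and PROGRESSING are transient while COMPLETE, CANCELED,
// and ERROR are terminal, a common pattern is to poll GetJob until a terminal
// status is reached. The loop below is a hedged sketch, not generated code:
// error handling is abbreviated, and the GetJob/GetJobInput/Job.Status names
// are assumed from this package.
//
//    for {
//        out, err := client.GetJob(&mediaconvert.GetJobInput{Id: aws.String(jobID)})
//        if err != nil {
//            break // handle the error properly in real code
//        }
//        status := aws.StringValue(out.Job.Status)
//        if status == mediaconvert.JobStatusComplete ||
//            status == mediaconvert.JobStatusCanceled ||
//            status == mediaconvert.JobStatusError {
//            break
//        }
//        time.Sleep(30 * time.Second)
//    }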
25830
25831// Optional. When you request a list of job templates, you can choose to list
25832// them alphabetically by NAME or chronologically by CREATION_DATE. If you don't
25833// specify, the service will list them by name.
25834const (
25835	// JobTemplateListByName is a JobTemplateListBy enum value
25836	JobTemplateListByName = "NAME"
25837
25838	// JobTemplateListByCreationDate is a JobTemplateListBy enum value
25839	JobTemplateListByCreationDate = "CREATION_DATE"
25840
25841	// JobTemplateListBySystem is a JobTemplateListBy enum value
25842	JobTemplateListBySystem = "SYSTEM"
25843)
25844
25845// JobTemplateListBy_Values returns all elements of the JobTemplateListBy enum
25846func JobTemplateListBy_Values() []string {
25847	return []string{
25848		JobTemplateListByName,
25849		JobTemplateListByCreationDate,
25850		JobTemplateListBySystem,
25851	}
25852}
25853
25854// Specify the language, using the ISO 639-2 three-letter code listed at https://www.loc.gov/standards/iso639-2/php/code_list.php.
25855const (
25856	// LanguageCodeEng is a LanguageCode enum value
25857	LanguageCodeEng = "ENG"
25858
25859	// LanguageCodeSpa is a LanguageCode enum value
25860	LanguageCodeSpa = "SPA"
25861
25862	// LanguageCodeFra is a LanguageCode enum value
25863	LanguageCodeFra = "FRA"
25864
25865	// LanguageCodeDeu is a LanguageCode enum value
25866	LanguageCodeDeu = "DEU"
25867
25868	// LanguageCodeGer is a LanguageCode enum value
25869	LanguageCodeGer = "GER"
25870
25871	// LanguageCodeZho is a LanguageCode enum value
25872	LanguageCodeZho = "ZHO"
25873
25874	// LanguageCodeAra is a LanguageCode enum value
25875	LanguageCodeAra = "ARA"
25876
25877	// LanguageCodeHin is a LanguageCode enum value
25878	LanguageCodeHin = "HIN"
25879
25880	// LanguageCodeJpn is a LanguageCode enum value
25881	LanguageCodeJpn = "JPN"
25882
25883	// LanguageCodeRus is a LanguageCode enum value
25884	LanguageCodeRus = "RUS"
25885
25886	// LanguageCodePor is a LanguageCode enum value
25887	LanguageCodePor = "POR"
25888
25889	// LanguageCodeIta is a LanguageCode enum value
25890	LanguageCodeIta = "ITA"
25891
25892	// LanguageCodeUrd is a LanguageCode enum value
25893	LanguageCodeUrd = "URD"
25894
25895	// LanguageCodeVie is a LanguageCode enum value
25896	LanguageCodeVie = "VIE"
25897
25898	// LanguageCodeKor is a LanguageCode enum value
25899	LanguageCodeKor = "KOR"
25900
25901	// LanguageCodePan is a LanguageCode enum value
25902	LanguageCodePan = "PAN"
25903
25904	// LanguageCodeAbk is a LanguageCode enum value
25905	LanguageCodeAbk = "ABK"
25906
25907	// LanguageCodeAar is a LanguageCode enum value
25908	LanguageCodeAar = "AAR"
25909
25910	// LanguageCodeAfr is a LanguageCode enum value
25911	LanguageCodeAfr = "AFR"
25912
25913	// LanguageCodeAka is a LanguageCode enum value
25914	LanguageCodeAka = "AKA"
25915
25916	// LanguageCodeSqi is a LanguageCode enum value
25917	LanguageCodeSqi = "SQI"
25918
25919	// LanguageCodeAmh is a LanguageCode enum value
25920	LanguageCodeAmh = "AMH"
25921
25922	// LanguageCodeArg is a LanguageCode enum value
25923	LanguageCodeArg = "ARG"
25924
25925	// LanguageCodeHye is a LanguageCode enum value
25926	LanguageCodeHye = "HYE"
25927
25928	// LanguageCodeAsm is a LanguageCode enum value
25929	LanguageCodeAsm = "ASM"
25930
25931	// LanguageCodeAva is a LanguageCode enum value
25932	LanguageCodeAva = "AVA"
25933
25934	// LanguageCodeAve is a LanguageCode enum value
25935	LanguageCodeAve = "AVE"
25936
25937	// LanguageCodeAym is a LanguageCode enum value
25938	LanguageCodeAym = "AYM"
25939
25940	// LanguageCodeAze is a LanguageCode enum value
25941	LanguageCodeAze = "AZE"
25942
25943	// LanguageCodeBam is a LanguageCode enum value
25944	LanguageCodeBam = "BAM"
25945
25946	// LanguageCodeBak is a LanguageCode enum value
25947	LanguageCodeBak = "BAK"
25948
25949	// LanguageCodeEus is a LanguageCode enum value
25950	LanguageCodeEus = "EUS"
25951
25952	// LanguageCodeBel is a LanguageCode enum value
25953	LanguageCodeBel = "BEL"
25954
25955	// LanguageCodeBen is a LanguageCode enum value
25956	LanguageCodeBen = "BEN"
25957
25958	// LanguageCodeBih is a LanguageCode enum value
25959	LanguageCodeBih = "BIH"
25960
25961	// LanguageCodeBis is a LanguageCode enum value
25962	LanguageCodeBis = "BIS"
25963
25964	// LanguageCodeBos is a LanguageCode enum value
25965	LanguageCodeBos = "BOS"
25966
25967	// LanguageCodeBre is a LanguageCode enum value
25968	LanguageCodeBre = "BRE"
25969
25970	// LanguageCodeBul is a LanguageCode enum value
25971	LanguageCodeBul = "BUL"
25972
25973	// LanguageCodeMya is a LanguageCode enum value
25974	LanguageCodeMya = "MYA"
25975
25976	// LanguageCodeCat is a LanguageCode enum value
25977	LanguageCodeCat = "CAT"
25978
25979	// LanguageCodeKhm is a LanguageCode enum value
25980	LanguageCodeKhm = "KHM"
25981
25982	// LanguageCodeCha is a LanguageCode enum value
25983	LanguageCodeCha = "CHA"
25984
25985	// LanguageCodeChe is a LanguageCode enum value
25986	LanguageCodeChe = "CHE"
25987
25988	// LanguageCodeNya is a LanguageCode enum value
25989	LanguageCodeNya = "NYA"
25990
25991	// LanguageCodeChu is a LanguageCode enum value
25992	LanguageCodeChu = "CHU"
25993
25994	// LanguageCodeChv is a LanguageCode enum value
25995	LanguageCodeChv = "CHV"
25996
25997	// LanguageCodeCor is a LanguageCode enum value
25998	LanguageCodeCor = "COR"
25999
26000	// LanguageCodeCos is a LanguageCode enum value
26001	LanguageCodeCos = "COS"
26002
26003	// LanguageCodeCre is a LanguageCode enum value
26004	LanguageCodeCre = "CRE"
26005
26006	// LanguageCodeHrv is a LanguageCode enum value
26007	LanguageCodeHrv = "HRV"
26008
26009	// LanguageCodeCes is a LanguageCode enum value
26010	LanguageCodeCes = "CES"
26011
26012	// LanguageCodeDan is a LanguageCode enum value
26013	LanguageCodeDan = "DAN"
26014
26015	// LanguageCodeDiv is a LanguageCode enum value
26016	LanguageCodeDiv = "DIV"
26017
26018	// LanguageCodeNld is a LanguageCode enum value
26019	LanguageCodeNld = "NLD"
26020
26021	// LanguageCodeDzo is a LanguageCode enum value
26022	LanguageCodeDzo = "DZO"
26023
26024	// LanguageCodeEnm is a LanguageCode enum value
26025	LanguageCodeEnm = "ENM"
26026
26027	// LanguageCodeEpo is a LanguageCode enum value
26028	LanguageCodeEpo = "EPO"
26029
26030	// LanguageCodeEst is a LanguageCode enum value
26031	LanguageCodeEst = "EST"
26032
26033	// LanguageCodeEwe is a LanguageCode enum value
26034	LanguageCodeEwe = "EWE"
26035
26036	// LanguageCodeFao is a LanguageCode enum value
26037	LanguageCodeFao = "FAO"
26038
26039	// LanguageCodeFij is a LanguageCode enum value
26040	LanguageCodeFij = "FIJ"
26041
26042	// LanguageCodeFin is a LanguageCode enum value
26043	LanguageCodeFin = "FIN"
26044
26045	// LanguageCodeFrm is a LanguageCode enum value
26046	LanguageCodeFrm = "FRM"
26047
26048	// LanguageCodeFul is a LanguageCode enum value
26049	LanguageCodeFul = "FUL"
26050
26051	// LanguageCodeGla is a LanguageCode enum value
26052	LanguageCodeGla = "GLA"
26053
26054	// LanguageCodeGlg is a LanguageCode enum value
26055	LanguageCodeGlg = "GLG"
26056
26057	// LanguageCodeLug is a LanguageCode enum value
26058	LanguageCodeLug = "LUG"
26059
26060	// LanguageCodeKat is a LanguageCode enum value
26061	LanguageCodeKat = "KAT"
26062
26063	// LanguageCodeEll is a LanguageCode enum value
26064	LanguageCodeEll = "ELL"
26065
26066	// LanguageCodeGrn is a LanguageCode enum value
26067	LanguageCodeGrn = "GRN"
26068
26069	// LanguageCodeGuj is a LanguageCode enum value
26070	LanguageCodeGuj = "GUJ"
26071
26072	// LanguageCodeHat is a LanguageCode enum value
26073	LanguageCodeHat = "HAT"
26074
26075	// LanguageCodeHau is a LanguageCode enum value
26076	LanguageCodeHau = "HAU"
26077
26078	// LanguageCodeHeb is a LanguageCode enum value
26079	LanguageCodeHeb = "HEB"
26080
26081	// LanguageCodeHer is a LanguageCode enum value
26082	LanguageCodeHer = "HER"
26083
26084	// LanguageCodeHmo is a LanguageCode enum value
26085	LanguageCodeHmo = "HMO"
26086
26087	// LanguageCodeHun is a LanguageCode enum value
26088	LanguageCodeHun = "HUN"
26089
26090	// LanguageCodeIsl is a LanguageCode enum value
26091	LanguageCodeIsl = "ISL"
26092
26093	// LanguageCodeIdo is a LanguageCode enum value
26094	LanguageCodeIdo = "IDO"
26095
26096	// LanguageCodeIbo is a LanguageCode enum value
26097	LanguageCodeIbo = "IBO"
26098
26099	// LanguageCodeInd is a LanguageCode enum value
26100	LanguageCodeInd = "IND"
26101
26102	// LanguageCodeIna is a LanguageCode enum value
26103	LanguageCodeIna = "INA"
26104
26105	// LanguageCodeIle is a LanguageCode enum value
26106	LanguageCodeIle = "ILE"
26107
26108	// LanguageCodeIku is a LanguageCode enum value
26109	LanguageCodeIku = "IKU"
26110
26111	// LanguageCodeIpk is a LanguageCode enum value
26112	LanguageCodeIpk = "IPK"
26113
26114	// LanguageCodeGle is a LanguageCode enum value
26115	LanguageCodeGle = "GLE"
26116
26117	// LanguageCodeJav is a LanguageCode enum value
26118	LanguageCodeJav = "JAV"
26119
26120	// LanguageCodeKal is a LanguageCode enum value
26121	LanguageCodeKal = "KAL"
26122
26123	// LanguageCodeKan is a LanguageCode enum value
26124	LanguageCodeKan = "KAN"
26125
26126	// LanguageCodeKau is a LanguageCode enum value
26127	LanguageCodeKau = "KAU"
26128
26129	// LanguageCodeKas is a LanguageCode enum value
26130	LanguageCodeKas = "KAS"
26131
26132	// LanguageCodeKaz is a LanguageCode enum value
26133	LanguageCodeKaz = "KAZ"
26134
26135	// LanguageCodeKik is a LanguageCode enum value
26136	LanguageCodeKik = "KIK"
26137
26138	// LanguageCodeKin is a LanguageCode enum value
26139	LanguageCodeKin = "KIN"
26140
26141	// LanguageCodeKir is a LanguageCode enum value
26142	LanguageCodeKir = "KIR"
26143
26144	// LanguageCodeKom is a LanguageCode enum value
26145	LanguageCodeKom = "KOM"
26146
26147	// LanguageCodeKon is a LanguageCode enum value
26148	LanguageCodeKon = "KON"
26149
26150	// LanguageCodeKua is a LanguageCode enum value
26151	LanguageCodeKua = "KUA"
26152
26153	// LanguageCodeKur is a LanguageCode enum value
26154	LanguageCodeKur = "KUR"
26155
26156	// LanguageCodeLao is a LanguageCode enum value
26157	LanguageCodeLao = "LAO"
26158
26159	// LanguageCodeLat is a LanguageCode enum value
26160	LanguageCodeLat = "LAT"
26161
26162	// LanguageCodeLav is a LanguageCode enum value
26163	LanguageCodeLav = "LAV"
26164
26165	// LanguageCodeLim is a LanguageCode enum value
26166	LanguageCodeLim = "LIM"
26167
26168	// LanguageCodeLin is a LanguageCode enum value
26169	LanguageCodeLin = "LIN"
26170
26171	// LanguageCodeLit is a LanguageCode enum value
26172	LanguageCodeLit = "LIT"
26173
26174	// LanguageCodeLub is a LanguageCode enum value
26175	LanguageCodeLub = "LUB"
26176
26177	// LanguageCodeLtz is a LanguageCode enum value
26178	LanguageCodeLtz = "LTZ"
26179
26180	// LanguageCodeMkd is a LanguageCode enum value
26181	LanguageCodeMkd = "MKD"
26182
26183	// LanguageCodeMlg is a LanguageCode enum value
26184	LanguageCodeMlg = "MLG"
26185
26186	// LanguageCodeMsa is a LanguageCode enum value
26187	LanguageCodeMsa = "MSA"
26188
26189	// LanguageCodeMal is a LanguageCode enum value
26190	LanguageCodeMal = "MAL"
26191
26192	// LanguageCodeMlt is a LanguageCode enum value
26193	LanguageCodeMlt = "MLT"
26194
26195	// LanguageCodeGlv is a LanguageCode enum value
26196	LanguageCodeGlv = "GLV"
26197
26198	// LanguageCodeMri is a LanguageCode enum value
26199	LanguageCodeMri = "MRI"
26200
26201	// LanguageCodeMar is a LanguageCode enum value
26202	LanguageCodeMar = "MAR"
26203
26204	// LanguageCodeMah is a LanguageCode enum value
26205	LanguageCodeMah = "MAH"
26206
26207	// LanguageCodeMon is a LanguageCode enum value
26208	LanguageCodeMon = "MON"
26209
26210	// LanguageCodeNau is a LanguageCode enum value
26211	LanguageCodeNau = "NAU"
26212
26213	// LanguageCodeNav is a LanguageCode enum value
26214	LanguageCodeNav = "NAV"
26215
26216	// LanguageCodeNde is a LanguageCode enum value
26217	LanguageCodeNde = "NDE"
26218
26219	// LanguageCodeNbl is a LanguageCode enum value
26220	LanguageCodeNbl = "NBL"
26221
26222	// LanguageCodeNdo is a LanguageCode enum value
26223	LanguageCodeNdo = "NDO"
26224
26225	// LanguageCodeNep is a LanguageCode enum value
26226	LanguageCodeNep = "NEP"
26227
26228	// LanguageCodeSme is a LanguageCode enum value
26229	LanguageCodeSme = "SME"
26230
26231	// LanguageCodeNor is a LanguageCode enum value
26232	LanguageCodeNor = "NOR"
26233
26234	// LanguageCodeNob is a LanguageCode enum value
26235	LanguageCodeNob = "NOB"
26236
26237	// LanguageCodeNno is a LanguageCode enum value
26238	LanguageCodeNno = "NNO"
26239
26240	// LanguageCodeOci is a LanguageCode enum value
26241	LanguageCodeOci = "OCI"
26242
26243	// LanguageCodeOji is a LanguageCode enum value
26244	LanguageCodeOji = "OJI"
26245
26246	// LanguageCodeOri is a LanguageCode enum value
26247	LanguageCodeOri = "ORI"
26248
26249	// LanguageCodeOrm is a LanguageCode enum value
26250	LanguageCodeOrm = "ORM"
26251
26252	// LanguageCodeOss is a LanguageCode enum value
26253	LanguageCodeOss = "OSS"
26254
26255	// LanguageCodePli is a LanguageCode enum value
26256	LanguageCodePli = "PLI"
26257
26258	// LanguageCodeFas is a LanguageCode enum value
26259	LanguageCodeFas = "FAS"
26260
26261	// LanguageCodePol is a LanguageCode enum value
26262	LanguageCodePol = "POL"
26263
26264	// LanguageCodePus is a LanguageCode enum value
26265	LanguageCodePus = "PUS"
26266
26267	// LanguageCodeQue is a LanguageCode enum value
26268	LanguageCodeQue = "QUE"
26269
26270	// LanguageCodeQaa is a LanguageCode enum value
26271	LanguageCodeQaa = "QAA"
26272
26273	// LanguageCodeRon is a LanguageCode enum value
26274	LanguageCodeRon = "RON"
26275
26276	// LanguageCodeRoh is a LanguageCode enum value
26277	LanguageCodeRoh = "ROH"
26278
26279	// LanguageCodeRun is a LanguageCode enum value
26280	LanguageCodeRun = "RUN"
26281
26282	// LanguageCodeSmo is a LanguageCode enum value
26283	LanguageCodeSmo = "SMO"
26284
26285	// LanguageCodeSag is a LanguageCode enum value
26286	LanguageCodeSag = "SAG"
26287
26288	// LanguageCodeSan is a LanguageCode enum value
26289	LanguageCodeSan = "SAN"
26290
26291	// LanguageCodeSrd is a LanguageCode enum value
26292	LanguageCodeSrd = "SRD"
26293
26294	// LanguageCodeSrb is a LanguageCode enum value
26295	LanguageCodeSrb = "SRB"
26296
26297	// LanguageCodeSna is a LanguageCode enum value
26298	LanguageCodeSna = "SNA"
26299
26300	// LanguageCodeIii is a LanguageCode enum value
26301	LanguageCodeIii = "III"
26302
26303	// LanguageCodeSnd is a LanguageCode enum value
26304	LanguageCodeSnd = "SND"
26305
26306	// LanguageCodeSin is a LanguageCode enum value
26307	LanguageCodeSin = "SIN"
26308
26309	// LanguageCodeSlk is a LanguageCode enum value
26310	LanguageCodeSlk = "SLK"
26311
26312	// LanguageCodeSlv is a LanguageCode enum value
26313	LanguageCodeSlv = "SLV"
26314
26315	// LanguageCodeSom is a LanguageCode enum value
26316	LanguageCodeSom = "SOM"
26317
26318	// LanguageCodeSot is a LanguageCode enum value
26319	LanguageCodeSot = "SOT"
26320
26321	// LanguageCodeSun is a LanguageCode enum value
26322	LanguageCodeSun = "SUN"
26323
26324	// LanguageCodeSwa is a LanguageCode enum value
26325	LanguageCodeSwa = "SWA"
26326
26327	// LanguageCodeSsw is a LanguageCode enum value
26328	LanguageCodeSsw = "SSW"
26329
26330	// LanguageCodeSwe is a LanguageCode enum value
26331	LanguageCodeSwe = "SWE"
26332
26333	// LanguageCodeTgl is a LanguageCode enum value
26334	LanguageCodeTgl = "TGL"
26335
26336	// LanguageCodeTah is a LanguageCode enum value
26337	LanguageCodeTah = "TAH"
26338
26339	// LanguageCodeTgk is a LanguageCode enum value
26340	LanguageCodeTgk = "TGK"
26341
26342	// LanguageCodeTam is a LanguageCode enum value
26343	LanguageCodeTam = "TAM"
26344
26345	// LanguageCodeTat is a LanguageCode enum value
26346	LanguageCodeTat = "TAT"
26347
26348	// LanguageCodeTel is a LanguageCode enum value
26349	LanguageCodeTel = "TEL"
26350
26351	// LanguageCodeTha is a LanguageCode enum value
26352	LanguageCodeTha = "THA"
26353
26354	// LanguageCodeBod is a LanguageCode enum value
26355	LanguageCodeBod = "BOD"
26356
26357	// LanguageCodeTir is a LanguageCode enum value
26358	LanguageCodeTir = "TIR"
26359
26360	// LanguageCodeTon is a LanguageCode enum value
26361	LanguageCodeTon = "TON"
26362
26363	// LanguageCodeTso is a LanguageCode enum value
26364	LanguageCodeTso = "TSO"
26365
26366	// LanguageCodeTsn is a LanguageCode enum value
26367	LanguageCodeTsn = "TSN"
26368
26369	// LanguageCodeTur is a LanguageCode enum value
26370	LanguageCodeTur = "TUR"
26371
26372	// LanguageCodeTuk is a LanguageCode enum value
26373	LanguageCodeTuk = "TUK"
26374
26375	// LanguageCodeTwi is a LanguageCode enum value
26376	LanguageCodeTwi = "TWI"
26377
26378	// LanguageCodeUig is a LanguageCode enum value
26379	LanguageCodeUig = "UIG"
26380
26381	// LanguageCodeUkr is a LanguageCode enum value
26382	LanguageCodeUkr = "UKR"
26383
26384	// LanguageCodeUzb is a LanguageCode enum value
26385	LanguageCodeUzb = "UZB"
26386
26387	// LanguageCodeVen is a LanguageCode enum value
26388	LanguageCodeVen = "VEN"
26389
26390	// LanguageCodeVol is a LanguageCode enum value
26391	LanguageCodeVol = "VOL"
26392
26393	// LanguageCodeWln is a LanguageCode enum value
26394	LanguageCodeWln = "WLN"
26395
26396	// LanguageCodeCym is a LanguageCode enum value
26397	LanguageCodeCym = "CYM"
26398
26399	// LanguageCodeFry is a LanguageCode enum value
26400	LanguageCodeFry = "FRY"
26401
26402	// LanguageCodeWol is a LanguageCode enum value
26403	LanguageCodeWol = "WOL"
26404
26405	// LanguageCodeXho is a LanguageCode enum value
26406	LanguageCodeXho = "XHO"
26407
26408	// LanguageCodeYid is a LanguageCode enum value
26409	LanguageCodeYid = "YID"
26410
26411	// LanguageCodeYor is a LanguageCode enum value
26412	LanguageCodeYor = "YOR"
26413
26414	// LanguageCodeZha is a LanguageCode enum value
26415	LanguageCodeZha = "ZHA"
26416
26417	// LanguageCodeZul is a LanguageCode enum value
26418	LanguageCodeZul = "ZUL"
26419
26420	// LanguageCodeOrj is a LanguageCode enum value
26421	LanguageCodeOrj = "ORJ"
26422
26423	// LanguageCodeQpc is a LanguageCode enum value
26424	LanguageCodeQpc = "QPC"
26425
26426	// LanguageCodeTng is a LanguageCode enum value
26427	LanguageCodeTng = "TNG"
26428)
26429
26430// LanguageCode_Values returns all elements of the LanguageCode enum
26431func LanguageCode_Values() []string {
26432	return []string{
26433		LanguageCodeEng,
26434		LanguageCodeSpa,
26435		LanguageCodeFra,
26436		LanguageCodeDeu,
26437		LanguageCodeGer,
26438		LanguageCodeZho,
26439		LanguageCodeAra,
26440		LanguageCodeHin,
26441		LanguageCodeJpn,
26442		LanguageCodeRus,
26443		LanguageCodePor,
26444		LanguageCodeIta,
26445		LanguageCodeUrd,
26446		LanguageCodeVie,
26447		LanguageCodeKor,
26448		LanguageCodePan,
26449		LanguageCodeAbk,
26450		LanguageCodeAar,
26451		LanguageCodeAfr,
26452		LanguageCodeAka,
26453		LanguageCodeSqi,
26454		LanguageCodeAmh,
26455		LanguageCodeArg,
26456		LanguageCodeHye,
26457		LanguageCodeAsm,
26458		LanguageCodeAva,
26459		LanguageCodeAve,
26460		LanguageCodeAym,
26461		LanguageCodeAze,
26462		LanguageCodeBam,
26463		LanguageCodeBak,
26464		LanguageCodeEus,
26465		LanguageCodeBel,
26466		LanguageCodeBen,
26467		LanguageCodeBih,
26468		LanguageCodeBis,
26469		LanguageCodeBos,
26470		LanguageCodeBre,
26471		LanguageCodeBul,
26472		LanguageCodeMya,
26473		LanguageCodeCat,
26474		LanguageCodeKhm,
26475		LanguageCodeCha,
26476		LanguageCodeChe,
26477		LanguageCodeNya,
26478		LanguageCodeChu,
26479		LanguageCodeChv,
26480		LanguageCodeCor,
26481		LanguageCodeCos,
26482		LanguageCodeCre,
26483		LanguageCodeHrv,
26484		LanguageCodeCes,
26485		LanguageCodeDan,
26486		LanguageCodeDiv,
26487		LanguageCodeNld,
26488		LanguageCodeDzo,
26489		LanguageCodeEnm,
26490		LanguageCodeEpo,
26491		LanguageCodeEst,
26492		LanguageCodeEwe,
26493		LanguageCodeFao,
26494		LanguageCodeFij,
26495		LanguageCodeFin,
26496		LanguageCodeFrm,
26497		LanguageCodeFul,
26498		LanguageCodeGla,
26499		LanguageCodeGlg,
26500		LanguageCodeLug,
26501		LanguageCodeKat,
26502		LanguageCodeEll,
26503		LanguageCodeGrn,
26504		LanguageCodeGuj,
26505		LanguageCodeHat,
26506		LanguageCodeHau,
26507		LanguageCodeHeb,
26508		LanguageCodeHer,
26509		LanguageCodeHmo,
26510		LanguageCodeHun,
26511		LanguageCodeIsl,
26512		LanguageCodeIdo,
26513		LanguageCodeIbo,
26514		LanguageCodeInd,
26515		LanguageCodeIna,
26516		LanguageCodeIle,
26517		LanguageCodeIku,
26518		LanguageCodeIpk,
26519		LanguageCodeGle,
26520		LanguageCodeJav,
26521		LanguageCodeKal,
26522		LanguageCodeKan,
26523		LanguageCodeKau,
26524		LanguageCodeKas,
26525		LanguageCodeKaz,
26526		LanguageCodeKik,
26527		LanguageCodeKin,
26528		LanguageCodeKir,
26529		LanguageCodeKom,
26530		LanguageCodeKon,
26531		LanguageCodeKua,
26532		LanguageCodeKur,
26533		LanguageCodeLao,
26534		LanguageCodeLat,
26535		LanguageCodeLav,
26536		LanguageCodeLim,
26537		LanguageCodeLin,
26538		LanguageCodeLit,
26539		LanguageCodeLub,
26540		LanguageCodeLtz,
26541		LanguageCodeMkd,
26542		LanguageCodeMlg,
26543		LanguageCodeMsa,
26544		LanguageCodeMal,
26545		LanguageCodeMlt,
26546		LanguageCodeGlv,
26547		LanguageCodeMri,
26548		LanguageCodeMar,
26549		LanguageCodeMah,
26550		LanguageCodeMon,
26551		LanguageCodeNau,
26552		LanguageCodeNav,
26553		LanguageCodeNde,
26554		LanguageCodeNbl,
26555		LanguageCodeNdo,
26556		LanguageCodeNep,
26557		LanguageCodeSme,
26558		LanguageCodeNor,
26559		LanguageCodeNob,
26560		LanguageCodeNno,
26561		LanguageCodeOci,
26562		LanguageCodeOji,
26563		LanguageCodeOri,
26564		LanguageCodeOrm,
26565		LanguageCodeOss,
26566		LanguageCodePli,
26567		LanguageCodeFas,
26568		LanguageCodePol,
26569		LanguageCodePus,
26570		LanguageCodeQue,
26571		LanguageCodeQaa,
26572		LanguageCodeRon,
26573		LanguageCodeRoh,
26574		LanguageCodeRun,
26575		LanguageCodeSmo,
26576		LanguageCodeSag,
26577		LanguageCodeSan,
26578		LanguageCodeSrd,
26579		LanguageCodeSrb,
26580		LanguageCodeSna,
26581		LanguageCodeIii,
26582		LanguageCodeSnd,
26583		LanguageCodeSin,
26584		LanguageCodeSlk,
26585		LanguageCodeSlv,
26586		LanguageCodeSom,
26587		LanguageCodeSot,
26588		LanguageCodeSun,
26589		LanguageCodeSwa,
26590		LanguageCodeSsw,
26591		LanguageCodeSwe,
26592		LanguageCodeTgl,
26593		LanguageCodeTah,
26594		LanguageCodeTgk,
26595		LanguageCodeTam,
26596		LanguageCodeTat,
26597		LanguageCodeTel,
26598		LanguageCodeTha,
26599		LanguageCodeBod,
26600		LanguageCodeTir,
26601		LanguageCodeTon,
26602		LanguageCodeTso,
26603		LanguageCodeTsn,
26604		LanguageCodeTur,
26605		LanguageCodeTuk,
26606		LanguageCodeTwi,
26607		LanguageCodeUig,
26608		LanguageCodeUkr,
26609		LanguageCodeUzb,
26610		LanguageCodeVen,
26611		LanguageCodeVol,
26612		LanguageCodeWln,
26613		LanguageCodeCym,
26614		LanguageCodeFry,
26615		LanguageCodeWol,
26616		LanguageCodeXho,
26617		LanguageCodeYid,
26618		LanguageCodeYor,
26619		LanguageCodeZha,
26620		LanguageCodeZul,
26621		LanguageCodeOrj,
26622		LanguageCodeQpc,
26623		LanguageCodeTng,
26624	}
26625}
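
// Because the list of supported codes is long, one simple use of
// LanguageCode_Values is to validate a caller-supplied code before building
// job settings. The sketch below uses only the function defined above; the
// CaptionDescription.LanguageCode field it feeds is an assumption about this
// package.
//
//    want := "POR"
//    valid := false
//    for _, code := range mediaconvert.LanguageCode_Values() {
//        if code == want {
//            valid = true
//            break
//        }
//    }
//    if valid {
//        caption := &mediaconvert.CaptionDescription{LanguageCode: aws.String(want)}
//        _ = caption
//    }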
26626
26627// Selects between the DVB and ATSC buffer models for Dolby Digital audio.
26628const (
26629	// M2tsAudioBufferModelDvb is a M2tsAudioBufferModel enum value
26630	M2tsAudioBufferModelDvb = "DVB"
26631
26632	// M2tsAudioBufferModelAtsc is a M2tsAudioBufferModel enum value
26633	M2tsAudioBufferModelAtsc = "ATSC"
26634)
26635
26636// M2tsAudioBufferModel_Values returns all elements of the M2tsAudioBufferModel enum
26637func M2tsAudioBufferModel_Values() []string {
26638	return []string{
26639		M2tsAudioBufferModelDvb,
26640		M2tsAudioBufferModelAtsc,
26641	}
26642}
26643
26644// Specify this setting only when your output will be consumed by a downstream
26645// repackaging workflow that is sensitive to very small duration differences
26646// between video and audio. For this situation, choose Match video duration
26647// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
26648// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
26649// MediaConvert pads the output audio streams with silence or trims them to
26650// ensure that the total duration of each audio stream is at least as long as
26651// the total duration of the video stream. After padding or trimming, the audio
26652// stream duration is no more than one frame longer than the video stream. MediaConvert
26653// applies audio padding or trimming only to the end of the last segment of
26654// the output. For unsegmented outputs, MediaConvert adds padding only to the
26655// end of the file. When you keep the default value, any minor discrepancies
26656// between audio and video duration will depend on your output audio codec.
26657const (
26658	// M2tsAudioDurationDefaultCodecDuration is a M2tsAudioDuration enum value
26659	M2tsAudioDurationDefaultCodecDuration = "DEFAULT_CODEC_DURATION"
26660
26661	// M2tsAudioDurationMatchVideoDuration is a M2tsAudioDuration enum value
26662	M2tsAudioDurationMatchVideoDuration = "MATCH_VIDEO_DURATION"
26663)
26664
26665// M2tsAudioDuration_Values returns all elements of the M2tsAudioDuration enum
26666func M2tsAudioDuration_Values() []string {
26667	return []string{
26668		M2tsAudioDurationDefaultCodecDuration,
26669		M2tsAudioDurationMatchVideoDuration,
26670	}
26671}
26672
26673// Controls what buffer model to use for accurate interleaving. If set to MULTIPLEX,
26674// uses the multiplex buffer model. If set to NONE, this can lead to lower latency,
26675// but low-memory devices may not be able to play back the stream without interruptions.
26676const (
26677	// M2tsBufferModelMultiplex is a M2tsBufferModel enum value
26678	M2tsBufferModelMultiplex = "MULTIPLEX"
26679
26680	// M2tsBufferModelNone is a M2tsBufferModel enum value
26681	M2tsBufferModelNone = "NONE"
26682)
26683
26684// M2tsBufferModel_Values returns all elements of the M2tsBufferModel enum
26685func M2tsBufferModel_Values() []string {
26686	return []string{
26687		M2tsBufferModelMultiplex,
26688		M2tsBufferModelNone,
26689	}
26690}
26691
26692// When set to VIDEO_AND_FIXED_INTERVALS, audio EBP markers will be added to
26693// partitions 3 and 4. The interval between these additional markers will be
26694// fixed, and will be slightly shorter than the video EBP marker interval. When
26695// set to VIDEO_INTERVAL, these additional markers will not be inserted. Only
26696// applicable when EBP segmentation markers are selected (segmentationMarkers
26697// is EBP or EBP_LEGACY).
26698const (
26699	// M2tsEbpAudioIntervalVideoAndFixedIntervals is a M2tsEbpAudioInterval enum value
26700	M2tsEbpAudioIntervalVideoAndFixedIntervals = "VIDEO_AND_FIXED_INTERVALS"
26701
26702	// M2tsEbpAudioIntervalVideoInterval is a M2tsEbpAudioInterval enum value
26703	M2tsEbpAudioIntervalVideoInterval = "VIDEO_INTERVAL"
26704)
26705
26706// M2tsEbpAudioInterval_Values returns all elements of the M2tsEbpAudioInterval enum
26707func M2tsEbpAudioInterval_Values() []string {
26708	return []string{
26709		M2tsEbpAudioIntervalVideoAndFixedIntervals,
26710		M2tsEbpAudioIntervalVideoInterval,
26711	}
26712}
26713
26714// Selects which PIDs to place EBP markers on. They can either be placed only
26715// on the video PID, or on both the video PID and all audio PIDs. Only applicable
26716// when EBP segmentation markers are selected (segmentationMarkers is EBP
26717// or EBP_LEGACY).
26718const (
26719	// M2tsEbpPlacementVideoAndAudioPids is a M2tsEbpPlacement enum value
26720	M2tsEbpPlacementVideoAndAudioPids = "VIDEO_AND_AUDIO_PIDS"
26721
26722	// M2tsEbpPlacementVideoPid is a M2tsEbpPlacement enum value
26723	M2tsEbpPlacementVideoPid = "VIDEO_PID"
26724)
26725
26726// M2tsEbpPlacement_Values returns all elements of the M2tsEbpPlacement enum
26727func M2tsEbpPlacement_Values() []string {
26728	return []string{
26729		M2tsEbpPlacementVideoAndAudioPids,
26730		M2tsEbpPlacementVideoPid,
26731	}
26732}
26733
26734// Controls whether to include the ES Rate field in the PES header.
26735const (
26736	// M2tsEsRateInPesInclude is a M2tsEsRateInPes enum value
26737	M2tsEsRateInPesInclude = "INCLUDE"
26738
26739	// M2tsEsRateInPesExclude is a M2tsEsRateInPes enum value
26740	M2tsEsRateInPesExclude = "EXCLUDE"
26741)
26742
26743// M2tsEsRateInPes_Values returns all elements of the M2tsEsRateInPes enum
26744func M2tsEsRateInPes_Values() []string {
26745	return []string{
26746		M2tsEsRateInPesInclude,
26747		M2tsEsRateInPesExclude,
26748	}
26749}
26750
26751// Keep the default value (DEFAULT) unless you know that your audio EBP markers
26752// are incorrectly appearing before your video EBP markers. To correct this
26753// problem, set this value to Force (FORCE).
26754const (
26755	// M2tsForceTsVideoEbpOrderForce is a M2tsForceTsVideoEbpOrder enum value
26756	M2tsForceTsVideoEbpOrderForce = "FORCE"
26757
26758	// M2tsForceTsVideoEbpOrderDefault is a M2tsForceTsVideoEbpOrder enum value
26759	M2tsForceTsVideoEbpOrderDefault = "DEFAULT"
26760)
26761
26762// M2tsForceTsVideoEbpOrder_Values returns all elements of the M2tsForceTsVideoEbpOrder enum
26763func M2tsForceTsVideoEbpOrder_Values() []string {
26764	return []string{
26765		M2tsForceTsVideoEbpOrderForce,
26766		M2tsForceTsVideoEbpOrderDefault,
26767	}
26768}
26769
26770// If INSERT, Nielsen inaudible tones for media tracking will be detected in
26771// the input audio and an equivalent ID3 tag will be inserted in the output.
26772const (
26773	// M2tsNielsenId3Insert is a M2tsNielsenId3 enum value
26774	M2tsNielsenId3Insert = "INSERT"
26775
26776	// M2tsNielsenId3None is a M2tsNielsenId3 enum value
26777	M2tsNielsenId3None = "NONE"
26778)
26779
26780// M2tsNielsenId3_Values returns all elements of the M2tsNielsenId3 enum
26781func M2tsNielsenId3_Values() []string {
26782	return []string{
26783		M2tsNielsenId3Insert,
26784		M2tsNielsenId3None,
26785	}
26786}
26787
26788// When set to PCR_EVERY_PES_PACKET, a Program Clock Reference value is inserted
26789// for every Packetized Elementary Stream (PES) header. This is effective only
26790// when the PCR PID is the same as the video or audio elementary stream.
26791const (
26792	// M2tsPcrControlPcrEveryPesPacket is a M2tsPcrControl enum value
26793	M2tsPcrControlPcrEveryPesPacket = "PCR_EVERY_PES_PACKET"
26794
26795	// M2tsPcrControlConfiguredPcrPeriod is a M2tsPcrControl enum value
26796	M2tsPcrControlConfiguredPcrPeriod = "CONFIGURED_PCR_PERIOD"
26797)
26798
26799// M2tsPcrControl_Values returns all elements of the M2tsPcrControl enum
26800func M2tsPcrControl_Values() []string {
26801	return []string{
26802		M2tsPcrControlPcrEveryPesPacket,
26803		M2tsPcrControlConfiguredPcrPeriod,
26804	}
26805}
26806
26807// When set to CBR, inserts null packets into transport stream to fill specified
26808// bitrate. When set to VBR, the bitrate setting acts as the maximum bitrate,
26809// but the output will not be padded up to that bitrate.
26810const (
26811	// M2tsRateModeVbr is a M2tsRateMode enum value
26812	M2tsRateModeVbr = "VBR"
26813
26814	// M2tsRateModeCbr is a M2tsRateMode enum value
26815	M2tsRateModeCbr = "CBR"
26816)
26817
26818// M2tsRateMode_Values returns all elements of the M2tsRateMode enum
26819func M2tsRateMode_Values() []string {
26820	return []string{
26821		M2tsRateModeVbr,
26822		M2tsRateModeCbr,
26823	}
26824}
26825
26826// For SCTE-35 markers from your input-- Choose Passthrough (PASSTHROUGH) if
26827// you want SCTE-35 markers that appear in your input to also appear in this
26828// output. Choose None (NONE) if you don't want SCTE-35 markers in this output.
26829// For SCTE-35 markers from an ESAM XML document-- Choose None (NONE). Also
26830// provide the ESAM XML as a string in the setting Signal processing notification
26831// XML (sccXml). Also enable ESAM SCTE-35 (include the property scte35Esam).
26832const (
26833	// M2tsScte35SourcePassthrough is a M2tsScte35Source enum value
26834	M2tsScte35SourcePassthrough = "PASSTHROUGH"
26835
26836	// M2tsScte35SourceNone is a M2tsScte35Source enum value
26837	M2tsScte35SourceNone = "NONE"
26838)
26839
26840// M2tsScte35Source_Values returns all elements of the M2tsScte35Source enum
26841func M2tsScte35Source_Values() []string {
26842	return []string{
26843		M2tsScte35SourcePassthrough,
26844		M2tsScte35SourceNone,
26845	}
26846}
26847
26848// Inserts segmentation markers at each segmentation_time period. rai_segstart
26849// sets the Random Access Indicator bit in the adaptation field. rai_adapt sets
26850// the RAI bit and adds the current timecode in the private data bytes. psi_segstart
26851// inserts PAT and PMT tables at the start of segments. ebp adds Encoder Boundary
26852// Point information to the adaptation field as per OpenCable specification
26853// OC-SP-EBP-I01-130118. ebp_legacy adds Encoder Boundary Point information
26854// to the adaptation field using a legacy proprietary format.
26855const (
26856	// M2tsSegmentationMarkersNone is a M2tsSegmentationMarkers enum value
26857	M2tsSegmentationMarkersNone = "NONE"
26858
26859	// M2tsSegmentationMarkersRaiSegstart is a M2tsSegmentationMarkers enum value
26860	M2tsSegmentationMarkersRaiSegstart = "RAI_SEGSTART"
26861
26862	// M2tsSegmentationMarkersRaiAdapt is a M2tsSegmentationMarkers enum value
26863	M2tsSegmentationMarkersRaiAdapt = "RAI_ADAPT"
26864
26865	// M2tsSegmentationMarkersPsiSegstart is a M2tsSegmentationMarkers enum value
26866	M2tsSegmentationMarkersPsiSegstart = "PSI_SEGSTART"
26867
26868	// M2tsSegmentationMarkersEbp is a M2tsSegmentationMarkers enum value
26869	M2tsSegmentationMarkersEbp = "EBP"
26870
26871	// M2tsSegmentationMarkersEbpLegacy is a M2tsSegmentationMarkers enum value
26872	M2tsSegmentationMarkersEbpLegacy = "EBP_LEGACY"
26873)
26874
26875// M2tsSegmentationMarkers_Values returns all elements of the M2tsSegmentationMarkers enum
26876func M2tsSegmentationMarkers_Values() []string {
26877	return []string{
26878		M2tsSegmentationMarkersNone,
26879		M2tsSegmentationMarkersRaiSegstart,
26880		M2tsSegmentationMarkersRaiAdapt,
26881		M2tsSegmentationMarkersPsiSegstart,
26882		M2tsSegmentationMarkersEbp,
26883		M2tsSegmentationMarkersEbpLegacy,
26884	}
26885}
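
// The EBP-specific settings earlier in this file (M2tsEbpAudioInterval and
// M2tsEbpPlacement) only take effect when an EBP marker type is selected here.
// A hedged sketch of that combination; the M2tsSettings field names are assumed
// from the corresponding enum names:
//
//    m2ts := &mediaconvert.M2tsSettings{
//        SegmentationMarkers: aws.String(mediaconvert.M2tsSegmentationMarkersEbp),
//        EbpPlacement:        aws.String(mediaconvert.M2tsEbpPlacementVideoAndAudioPids),
//        EbpAudioInterval:    aws.String(mediaconvert.M2tsEbpAudioIntervalVideoAndFixedIntervals),
//    }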
26886
26887// The segmentation style parameter controls how segmentation markers are inserted
26888// into the transport stream. With avails, it is possible that segments may
26889// be truncated, which can influence where future segmentation markers are inserted.
26890// When a segmentation style of "reset_cadence" is selected and a segment is
26891// truncated due to an avail, we will reset the segmentation cadence. This means
26892// the subsequent segment will have a duration of $segmentation_time seconds.
26893// When a segmentation style of "maintain_cadence" is selected and a segment
26894// is truncated due to an avail, we will not reset the segmentation cadence.
26895// This means the subsequent segment will likely be truncated as well. However,
26896// all segments after that will have a duration of $segmentation_time seconds.
26897// Note that EBP lookahead is a slight exception to this rule.
26898const (
26899	// M2tsSegmentationStyleMaintainCadence is a M2tsSegmentationStyle enum value
26900	M2tsSegmentationStyleMaintainCadence = "MAINTAIN_CADENCE"
26901
26902	// M2tsSegmentationStyleResetCadence is a M2tsSegmentationStyle enum value
26903	M2tsSegmentationStyleResetCadence = "RESET_CADENCE"
26904)
26905
26906// M2tsSegmentationStyle_Values returns all elements of the M2tsSegmentationStyle enum
26907func M2tsSegmentationStyle_Values() []string {
26908	return []string{
26909		M2tsSegmentationStyleMaintainCadence,
26910		M2tsSegmentationStyleResetCadence,
26911	}
26912}
26913
26914// Specify this setting only when your output will be consumed by a downstream
26915// repackaging workflow that is sensitive to very small duration differences
26916// between video and audio. For this situation, choose Match video duration
26917// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
26918// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
26919// MediaConvert pads the output audio streams with silence or trims them to
26920// ensure that the total duration of each audio stream is at least as long as
26921// the total duration of the video stream. After padding or trimming, the audio
26922// stream duration is no more than one frame longer than the video stream. MediaConvert
26923// applies audio padding or trimming only to the end of the last segment of
26924// the output. For unsegmented outputs, MediaConvert adds padding only to the
26925// end of the file. When you keep the default value, any minor discrepancies
26926// between audio and video duration will depend on your output audio codec.
26927const (
26928	// M3u8AudioDurationDefaultCodecDuration is a M3u8AudioDuration enum value
26929	M3u8AudioDurationDefaultCodecDuration = "DEFAULT_CODEC_DURATION"
26930
26931	// M3u8AudioDurationMatchVideoDuration is a M3u8AudioDuration enum value
26932	M3u8AudioDurationMatchVideoDuration = "MATCH_VIDEO_DURATION"
26933)
26934
26935// M3u8AudioDuration_Values returns all elements of the M3u8AudioDuration enum
26936func M3u8AudioDuration_Values() []string {
26937	return []string{
26938		M3u8AudioDurationDefaultCodecDuration,
26939		M3u8AudioDurationMatchVideoDuration,
26940	}
26941}
26942
26943// If INSERT, Nielsen inaudible tones for media tracking will be detected in
26944// the input audio and an equivalent ID3 tag will be inserted in the output.
26945const (
26946	// M3u8NielsenId3Insert is a M3u8NielsenId3 enum value
26947	M3u8NielsenId3Insert = "INSERT"
26948
26949	// M3u8NielsenId3None is a M3u8NielsenId3 enum value
26950	M3u8NielsenId3None = "NONE"
26951)
26952
26953// M3u8NielsenId3_Values returns all elements of the M3u8NielsenId3 enum
26954func M3u8NielsenId3_Values() []string {
26955	return []string{
26956		M3u8NielsenId3Insert,
26957		M3u8NielsenId3None,
26958	}
26959}
26960
26961// When set to PCR_EVERY_PES_PACKET, a Program Clock Reference value is inserted
26962// for every Packetized Elementary Stream (PES) header. This parameter is effective
26963// only when the PCR PID is the same as the video or audio elementary stream.
26964const (
26965	// M3u8PcrControlPcrEveryPesPacket is a M3u8PcrControl enum value
26966	M3u8PcrControlPcrEveryPesPacket = "PCR_EVERY_PES_PACKET"
26967
26968	// M3u8PcrControlConfiguredPcrPeriod is a M3u8PcrControl enum value
26969	M3u8PcrControlConfiguredPcrPeriod = "CONFIGURED_PCR_PERIOD"
26970)
26971
26972// M3u8PcrControl_Values returns all elements of the M3u8PcrControl enum
26973func M3u8PcrControl_Values() []string {
26974	return []string{
26975		M3u8PcrControlPcrEveryPesPacket,
26976		M3u8PcrControlConfiguredPcrPeriod,
26977	}
26978}
26979
26980// For SCTE-35 markers from your input-- Choose Passthrough (PASSTHROUGH) if
26981// you want SCTE-35 markers that appear in your input to also appear in this
26982// output. Choose None (NONE) if you don't want SCTE-35 markers in this output.
26983// For SCTE-35 markers from an ESAM XML document-- Choose None (NONE) if you
26984// don't want manifest conditioning. Choose Passthrough (PASSTHROUGH) and choose
26985// Ad markers (adMarkers) if you do want manifest conditioning. In both cases,
26986// also provide the ESAM XML as a string in the setting Signal processing notification
26987// XML (sccXml).
26988const (
26989	// M3u8Scte35SourcePassthrough is a M3u8Scte35Source enum value
26990	M3u8Scte35SourcePassthrough = "PASSTHROUGH"
26991
26992	// M3u8Scte35SourceNone is a M3u8Scte35Source enum value
26993	M3u8Scte35SourceNone = "NONE"
26994)
26995
26996// M3u8Scte35Source_Values returns all elements of the M3u8Scte35Source enum
26997func M3u8Scte35Source_Values() []string {
26998	return []string{
26999		M3u8Scte35SourcePassthrough,
27000		M3u8Scte35SourceNone,
27001	}
27002}
27003
27004// Choose the type of motion graphic asset that you are providing for your overlay.
27005// You can choose either a .mov file or a series of .png files.
27006const (
27007	// MotionImageInsertionModeMov is a MotionImageInsertionMode enum value
27008	MotionImageInsertionModeMov = "MOV"
27009
27010	// MotionImageInsertionModePng is a MotionImageInsertionMode enum value
27011	MotionImageInsertionModePng = "PNG"
27012)
27013
27014// MotionImageInsertionMode_Values returns all elements of the MotionImageInsertionMode enum
27015func MotionImageInsertionMode_Values() []string {
27016	return []string{
27017		MotionImageInsertionModeMov,
27018		MotionImageInsertionModePng,
27019	}
27020}
27021
27022// Specify whether your motion graphic overlay repeats on a loop or plays only
27023// once.
27024const (
27025	// MotionImagePlaybackOnce is a MotionImagePlayback enum value
27026	MotionImagePlaybackOnce = "ONCE"
27027
27028	// MotionImagePlaybackRepeat is a MotionImagePlayback enum value
27029	MotionImagePlaybackRepeat = "REPEAT"
27030)
27031
27032// MotionImagePlayback_Values returns all elements of the MotionImagePlayback enum
27033func MotionImagePlayback_Values() []string {
27034	return []string{
27035		MotionImagePlaybackOnce,
27036		MotionImagePlaybackRepeat,
27037	}
27038}
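
// The two motion-overlay enums above are used together on the job's motion
// image inserter. A minimal sketch, assuming the MotionImageInserter field
// names shown here; the S3 path is a hypothetical placeholder:
//
//    overlay := &mediaconvert.MotionImageInserter{
//        InsertionMode: aws.String(mediaconvert.MotionImageInsertionModePng),
//        Playback:      aws.String(mediaconvert.MotionImagePlaybackRepeat),
//        // Hypothetical location of the first frame in the .png series.
//        Input: aws.String("s3://example-bucket/overlay/overlay_000.png"),
//    }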
27039
27040// When enabled, include 'clap' atom if appropriate for the video output settings.
27041const (
27042	// MovClapAtomInclude is a MovClapAtom enum value
27043	MovClapAtomInclude = "INCLUDE"
27044
27045	// MovClapAtomExclude is a MovClapAtom enum value
27046	MovClapAtomExclude = "EXCLUDE"
27047)
27048
27049// MovClapAtom_Values returns all elements of the MovClapAtom enum
27050func MovClapAtom_Values() []string {
27051	return []string{
27052		MovClapAtomInclude,
27053		MovClapAtomExclude,
27054	}
27055}
27056
27057// When enabled, file composition times will start at zero, composition times
27058// in the 'ctts' (composition time to sample) box for B-frames will be negative,
27059// and a 'cslg' (composition shift least greatest) box will be included per
27060// 14496-1 amendment 1. This improves compatibility with Apple players and tools.
27061const (
27062	// MovCslgAtomInclude is a MovCslgAtom enum value
27063	MovCslgAtomInclude = "INCLUDE"
27064
27065	// MovCslgAtomExclude is a MovCslgAtom enum value
27066	MovCslgAtomExclude = "EXCLUDE"
27067)
27068
27069// MovCslgAtom_Values returns all elements of the MovCslgAtom enum
27070func MovCslgAtom_Values() []string {
27071	return []string{
27072		MovCslgAtomInclude,
27073		MovCslgAtomExclude,
27074	}
27075}
27076
27077// When set to XDCAM, writes MPEG2 video streams into the QuickTime file using
27078// XDCAM fourcc codes. This increases compatibility with Apple editors and players,
27079// but may decrease compatibility with other players. Only applicable when the
27080// video codec is MPEG2.
27081const (
27082	// MovMpeg2FourCCControlXdcam is a MovMpeg2FourCCControl enum value
27083	MovMpeg2FourCCControlXdcam = "XDCAM"
27084
27085	// MovMpeg2FourCCControlMpeg is a MovMpeg2FourCCControl enum value
27086	MovMpeg2FourCCControlMpeg = "MPEG"
27087)
27088
27089// MovMpeg2FourCCControl_Values returns all elements of the MovMpeg2FourCCControl enum
27090func MovMpeg2FourCCControl_Values() []string {
27091	return []string{
27092		MovMpeg2FourCCControlXdcam,
27093		MovMpeg2FourCCControlMpeg,
27094	}
27095}
27096
27097// To make this output compatible with Omneon, keep the default value, OMNEON.
27098// Unless you need Omneon compatibility, set this value to NONE. When you keep
27099// the default value, OMNEON, MediaConvert increases the length of the edit
27100// list atom. This might cause file rejections when a recipient of the output
27101// file doesn't expect this extra padding.
27102const (
27103	// MovPaddingControlOmneon is a MovPaddingControl enum value
27104	MovPaddingControlOmneon = "OMNEON"
27105
27106	// MovPaddingControlNone is a MovPaddingControl enum value
27107	MovPaddingControlNone = "NONE"
27108)
27109
27110// MovPaddingControl_Values returns all elements of the MovPaddingControl enum
27111func MovPaddingControl_Values() []string {
27112	return []string{
27113		MovPaddingControlOmneon,
27114		MovPaddingControlNone,
27115	}
27116}
27117
27118// Always keep the default value (SELF_CONTAINED) for this setting.
27119const (
27120	// MovReferenceSelfContained is a MovReference enum value
27121	MovReferenceSelfContained = "SELF_CONTAINED"
27122
27123	// MovReferenceExternal is a MovReference enum value
27124	MovReferenceExternal = "EXTERNAL"
27125)
27126
27127// MovReference_Values returns all elements of the MovReference enum
27128func MovReference_Values() []string {
27129	return []string{
27130		MovReferenceSelfContained,
27131		MovReferenceExternal,
27132	}
27133}
27134
27135// Specify whether the service encodes this MP3 audio output with a constant
27136// bitrate (CBR) or a variable bitrate (VBR).
27137const (
27138	// Mp3RateControlModeCbr is a Mp3RateControlMode enum value
27139	Mp3RateControlModeCbr = "CBR"
27140
27141	// Mp3RateControlModeVbr is a Mp3RateControlMode enum value
27142	Mp3RateControlModeVbr = "VBR"
27143)
27144
27145// Mp3RateControlMode_Values returns all elements of the Mp3RateControlMode enum
27146func Mp3RateControlMode_Values() []string {
27147	return []string{
27148		Mp3RateControlModeCbr,
27149		Mp3RateControlModeVbr,
27150	}
27151}
27152
27153// When enabled, file composition times will start at zero, composition times
27154// in the 'ctts' (composition time to sample) box for B-frames will be negative,
27155// and a 'cslg' (composition shift least greatest) box will be included per
27156// 14496-1 amendment 1. This improves compatibility with Apple players and tools.
27157const (
27158	// Mp4CslgAtomInclude is a Mp4CslgAtom enum value
27159	Mp4CslgAtomInclude = "INCLUDE"
27160
27161	// Mp4CslgAtomExclude is a Mp4CslgAtom enum value
27162	Mp4CslgAtomExclude = "EXCLUDE"
27163)
27164
27165// Mp4CslgAtom_Values returns all elements of the Mp4CslgAtom enum
27166func Mp4CslgAtom_Values() []string {
27167	return []string{
27168		Mp4CslgAtomInclude,
27169		Mp4CslgAtomExclude,
27170	}
27171}
27172
27173// Inserts a free-space box immediately after the moov box.
27174const (
27175	// Mp4FreeSpaceBoxInclude is a Mp4FreeSpaceBox enum value
27176	Mp4FreeSpaceBoxInclude = "INCLUDE"
27177
27178	// Mp4FreeSpaceBoxExclude is a Mp4FreeSpaceBox enum value
27179	Mp4FreeSpaceBoxExclude = "EXCLUDE"
27180)
27181
27182// Mp4FreeSpaceBox_Values returns all elements of the Mp4FreeSpaceBox enum
27183func Mp4FreeSpaceBox_Values() []string {
27184	return []string{
27185		Mp4FreeSpaceBoxInclude,
27186		Mp4FreeSpaceBoxExclude,
27187	}
27188}
27189
27190// If set to PROGRESSIVE_DOWNLOAD, the MOOV atom is relocated to the beginning
27191// of the archive as required for progressive downloading. Otherwise it is placed
27192// normally at the end.
27193const (
27194	// Mp4MoovPlacementProgressiveDownload is a Mp4MoovPlacement enum value
27195	Mp4MoovPlacementProgressiveDownload = "PROGRESSIVE_DOWNLOAD"
27196
27197	// Mp4MoovPlacementNormal is a Mp4MoovPlacement enum value
27198	Mp4MoovPlacementNormal = "NORMAL"
27199)
27200
27201// Mp4MoovPlacement_Values returns all elements of the Mp4MoovPlacement enum
27202func Mp4MoovPlacement_Values() []string {
27203	return []string{
27204		Mp4MoovPlacementProgressiveDownload,
27205		Mp4MoovPlacementNormal,
27206	}
27207}
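
// For progressive download over HTTP, the moov atom generally needs to be at
// the front of the file. A hedged sketch of that configuration; the Mp4Settings
// field names are assumed from the Mp4 enums in this file:
//
//    mp4 := &mediaconvert.Mp4Settings{
//        MoovPlacement: aws.String(mediaconvert.Mp4MoovPlacementProgressiveDownload),
//        FreeSpaceBox:  aws.String(mediaconvert.Mp4FreeSpaceBoxExclude),
//    }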
27208
27209// Optional. Choose Include (INCLUDE) to have MediaConvert mark up your DASH
27210// manifest with elements for embedded 608 captions. This markup isn't generally
27211// required, but some video players require it to discover and play embedded
27212// 608 captions. Keep the default value, Exclude (EXCLUDE), to leave these elements
27213// out. When you enable this setting, this is the markup that MediaConvert includes
27214// in your manifest:
27215const (
27216	// MpdAccessibilityCaptionHintsInclude is a MpdAccessibilityCaptionHints enum value
27217	MpdAccessibilityCaptionHintsInclude = "INCLUDE"
27218
27219	// MpdAccessibilityCaptionHintsExclude is a MpdAccessibilityCaptionHints enum value
27220	MpdAccessibilityCaptionHintsExclude = "EXCLUDE"
27221)
27222
27223// MpdAccessibilityCaptionHints_Values returns all elements of the MpdAccessibilityCaptionHints enum
27224func MpdAccessibilityCaptionHints_Values() []string {
27225	return []string{
27226		MpdAccessibilityCaptionHintsInclude,
27227		MpdAccessibilityCaptionHintsExclude,
27228	}
27229}
27230
27231// Specify this setting only when your output will be consumed by a downstream
27232// repackaging workflow that is sensitive to very small duration differences
27233// between video and audio. For this situation, choose Match video duration
27234// (MATCH_VIDEO_DURATION). In all other cases, keep the default value, Default
27235// codec duration (DEFAULT_CODEC_DURATION). When you choose Match video duration,
27236// MediaConvert pads the output audio streams with silence or trims them to
27237// ensure that the total duration of each audio stream is at least as long as
27238// the total duration of the video stream. After padding or trimming, the audio
27239// stream duration is no more than one frame longer than the video stream. MediaConvert
27240// applies audio padding or trimming only to the end of the last segment of
27241// the output. For unsegmented outputs, MediaConvert adds padding only to the
27242// end of the file. When you keep the default value, any minor discrepancies
27243// between audio and video duration will depend on your output audio codec.
27244const (
27245	// MpdAudioDurationDefaultCodecDuration is a MpdAudioDuration enum value
27246	MpdAudioDurationDefaultCodecDuration = "DEFAULT_CODEC_DURATION"
27247
27248	// MpdAudioDurationMatchVideoDuration is a MpdAudioDuration enum value
27249	MpdAudioDurationMatchVideoDuration = "MATCH_VIDEO_DURATION"
27250)
27251
27252// MpdAudioDuration_Values returns all elements of the MpdAudioDuration enum
27253func MpdAudioDuration_Values() []string {
27254	return []string{
27255		MpdAudioDurationDefaultCodecDuration,
27256		MpdAudioDurationMatchVideoDuration,
27257	}
27258}
27259
27260// Use this setting only in DASH output groups that include sidecar TTML or
27261// IMSC captions. You specify sidecar captions in a separate output from your
27262// audio and video. Choose Raw (RAW) for captions in a single XML file in a
27263// raw container. Choose Fragmented MPEG-4 (FRAGMENTED_MP4) for captions in
27264// XML format contained within fragmented MP4 files. This set of fragmented
27265// MP4 files is separate from your video and audio fragmented MP4 files.
27266const (
27267	// MpdCaptionContainerTypeRaw is a MpdCaptionContainerType enum value
27268	MpdCaptionContainerTypeRaw = "RAW"
27269
27270	// MpdCaptionContainerTypeFragmentedMp4 is a MpdCaptionContainerType enum value
27271	MpdCaptionContainerTypeFragmentedMp4 = "FRAGMENTED_MP4"
27272)
27273
27274// MpdCaptionContainerType_Values returns all elements of the MpdCaptionContainerType enum
27275func MpdCaptionContainerType_Values() []string {
27276	return []string{
27277		MpdCaptionContainerTypeRaw,
27278		MpdCaptionContainerTypeFragmentedMp4,
27279	}
27280}
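
// For DASH outputs with sidecar IMSC or TTML captions, the MPD-level enums
// above are set on MpdSettings. A minimal sketch, assuming the field names
// follow the enum names as elsewhere in this package:
//
//    mpd := &mediaconvert.MpdSettings{
//        CaptionContainerType:      aws.String(mediaconvert.MpdCaptionContainerTypeFragmentedMp4),
//        AccessibilityCaptionHints: aws.String(mediaconvert.MpdAccessibilityCaptionHintsInclude),
//    }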
27281
27282// Use this setting only when you specify SCTE-35 markers from ESAM. Choose
27283// INSERT to put SCTE-35 markers in this output at the insertion points that
27284// you specify in an ESAM XML document. Provide the document in the setting
27285// SCC XML (sccXml).
27286const (
27287	// MpdScte35EsamInsert is a MpdScte35Esam enum value
27288	MpdScte35EsamInsert = "INSERT"
27289
27290	// MpdScte35EsamNone is a MpdScte35Esam enum value
27291	MpdScte35EsamNone = "NONE"
27292)
27293
27294// MpdScte35Esam_Values returns all elements of the MpdScte35Esam enum
27295func MpdScte35Esam_Values() []string {
27296	return []string{
27297		MpdScte35EsamInsert,
27298		MpdScte35EsamNone,
27299	}
27300}
27301
27302// Ignore this setting unless you have SCTE-35 markers in your input video file.
27303// Choose Passthrough (PASSTHROUGH) if you want SCTE-35 markers that appear
27304// in your input to also appear in this output. Choose None (NONE) if you don't
27305// want those SCTE-35 markers in this output.
27306const (
27307	// MpdScte35SourcePassthrough is a MpdScte35Source enum value
27308	MpdScte35SourcePassthrough = "PASSTHROUGH"
27309
27310	// MpdScte35SourceNone is a MpdScte35Source enum value
27311	MpdScte35SourceNone = "NONE"
27312)
27313
27314// MpdScte35Source_Values returns all elements of the MpdScte35Source enum
27315func MpdScte35Source_Values() []string {
27316	return []string{
27317		MpdScte35SourcePassthrough,
27318		MpdScte35SourceNone,
27319	}
27320}
27321
27322// Specify the strength of any adaptive quantization filters that you enable.
27323// The value that you choose here applies to the following settings: Spatial
27324// adaptive quantization (spatialAdaptiveQuantization), and Temporal adaptive
27325// quantization (temporalAdaptiveQuantization).
27326const (
27327	// Mpeg2AdaptiveQuantizationOff is a Mpeg2AdaptiveQuantization enum value
27328	Mpeg2AdaptiveQuantizationOff = "OFF"
27329
27330	// Mpeg2AdaptiveQuantizationLow is a Mpeg2AdaptiveQuantization enum value
27331	Mpeg2AdaptiveQuantizationLow = "LOW"
27332
27333	// Mpeg2AdaptiveQuantizationMedium is a Mpeg2AdaptiveQuantization enum value
27334	Mpeg2AdaptiveQuantizationMedium = "MEDIUM"
27335
27336	// Mpeg2AdaptiveQuantizationHigh is a Mpeg2AdaptiveQuantization enum value
27337	Mpeg2AdaptiveQuantizationHigh = "HIGH"
27338)
27339
27340// Mpeg2AdaptiveQuantization_Values returns all elements of the Mpeg2AdaptiveQuantization enum
27341func Mpeg2AdaptiveQuantization_Values() []string {
27342	return []string{
27343		Mpeg2AdaptiveQuantizationOff,
27344		Mpeg2AdaptiveQuantizationLow,
27345		Mpeg2AdaptiveQuantizationMedium,
27346		Mpeg2AdaptiveQuantizationHigh,
27347	}
27348}
27349
27350// Use Level (Mpeg2CodecLevel) to set the MPEG-2 level for the video output.
27351const (
27352	// Mpeg2CodecLevelAuto is a Mpeg2CodecLevel enum value
27353	Mpeg2CodecLevelAuto = "AUTO"
27354
27355	// Mpeg2CodecLevelLow is a Mpeg2CodecLevel enum value
27356	Mpeg2CodecLevelLow = "LOW"
27357
27358	// Mpeg2CodecLevelMain is a Mpeg2CodecLevel enum value
27359	Mpeg2CodecLevelMain = "MAIN"
27360
27361	// Mpeg2CodecLevelHigh1440 is a Mpeg2CodecLevel enum value
27362	Mpeg2CodecLevelHigh1440 = "HIGH1440"
27363
27364	// Mpeg2CodecLevelHigh is a Mpeg2CodecLevel enum value
27365	Mpeg2CodecLevelHigh = "HIGH"
27366)
27367
27368// Mpeg2CodecLevel_Values returns all elements of the Mpeg2CodecLevel enum
27369func Mpeg2CodecLevel_Values() []string {
27370	return []string{
27371		Mpeg2CodecLevelAuto,
27372		Mpeg2CodecLevelLow,
27373		Mpeg2CodecLevelMain,
27374		Mpeg2CodecLevelHigh1440,
27375		Mpeg2CodecLevelHigh,
27376	}
27377}
27378
27379// Use Profile (Mpeg2CodecProfile) to set the MPEG-2 profile for the video output.
27380const (
27381	// Mpeg2CodecProfileMain is a Mpeg2CodecProfile enum value
27382	Mpeg2CodecProfileMain = "MAIN"
27383
27384	// Mpeg2CodecProfileProfile422 is a Mpeg2CodecProfile enum value
27385	Mpeg2CodecProfileProfile422 = "PROFILE_422"
27386)
27387
27388// Mpeg2CodecProfile_Values returns all elements of the Mpeg2CodecProfile enum
27389func Mpeg2CodecProfile_Values() []string {
27390	return []string{
27391		Mpeg2CodecProfileMain,
27392		Mpeg2CodecProfileProfile422,
27393	}
27394}
27395
27396// Choose Adaptive to improve subjective video quality for high-motion content.
27397// This will cause the service to use fewer B-frames (which infer information
27398// based on other frames) for high-motion portions of the video and more B-frames
27399// for low-motion portions. The maximum number of B-frames is limited by the
27400// value you provide for the setting B frames between reference frames (numberBFramesBetweenReferenceFrames).
27401const (
27402	// Mpeg2DynamicSubGopAdaptive is a Mpeg2DynamicSubGop enum value
27403	Mpeg2DynamicSubGopAdaptive = "ADAPTIVE"
27404
27405	// Mpeg2DynamicSubGopStatic is a Mpeg2DynamicSubGop enum value
27406	Mpeg2DynamicSubGopStatic = "STATIC"
27407)
27408
27409// Mpeg2DynamicSubGop_Values returns all elements of the Mpeg2DynamicSubGop enum
27410func Mpeg2DynamicSubGop_Values() []string {
27411	return []string{
27412		Mpeg2DynamicSubGopAdaptive,
27413		Mpeg2DynamicSubGopStatic,
27414	}
27415}
27416
27417// If you are using the console, use the Framerate setting to specify the frame
27418// rate for this output. If you want to keep the same frame rate as the input
27419// video, choose Follow source. If you want to do frame rate conversion, choose
27420// a frame rate from the dropdown list or choose Custom. The framerates shown
27421// in the dropdown list are decimal approximations of fractions. If you choose
27422// Custom, specify your frame rate as a fraction. If you are creating your transcoding
27423// job specification as a JSON file without the console, use FramerateControl
27424// to specify which value the service uses for the frame rate for this output.
27425// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
27426// from the input. Choose SPECIFIED if you want the service to use the frame
27427// rate you specify in the settings FramerateNumerator and FramerateDenominator.
27428const (
27429	// Mpeg2FramerateControlInitializeFromSource is a Mpeg2FramerateControl enum value
27430	Mpeg2FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
27431
27432	// Mpeg2FramerateControlSpecified is a Mpeg2FramerateControl enum value
27433	Mpeg2FramerateControlSpecified = "SPECIFIED"
27434)
27435
27436// Mpeg2FramerateControl_Values returns all elements of the Mpeg2FramerateControl enum
27437func Mpeg2FramerateControl_Values() []string {
27438	return []string{
27439		Mpeg2FramerateControlInitializeFromSource,
27440		Mpeg2FramerateControlSpecified,
27441	}
27442}
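
// When you choose SPECIFIED in a JSON job, the frame rate itself is supplied
// as a fraction on the same codec settings object. A hedged sketch; the
// numerator and denominator values are illustrative (29.97 fps):
//
//    mpeg2 := &mediaconvert.Mpeg2Settings{
//        FramerateControl:     aws.String(mediaconvert.Mpeg2FramerateControlSpecified),
//        FramerateNumerator:   aws.Int64(30000),
//        FramerateDenominator: aws.Int64(1001),
//    }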
27443
27444// Choose the method that you want MediaConvert to use when increasing or decreasing
27445// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
27446// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
27447// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
27448// smooth picture, but might introduce undesirable video artifacts. For complex
27449// frame rate conversions, especially if your source video has already been
27450// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
27451// motion-compensated interpolation. FrameFormer chooses the best conversion
27452// method frame by frame. Note that using FrameFormer increases the transcoding
27453// time and incurs a significant add-on cost.
27454const (
27455	// Mpeg2FramerateConversionAlgorithmDuplicateDrop is a Mpeg2FramerateConversionAlgorithm enum value
27456	Mpeg2FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
27457
27458	// Mpeg2FramerateConversionAlgorithmInterpolate is a Mpeg2FramerateConversionAlgorithm enum value
27459	Mpeg2FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
27460
27461	// Mpeg2FramerateConversionAlgorithmFrameformer is a Mpeg2FramerateConversionAlgorithm enum value
27462	Mpeg2FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
27463)
27464
27465// Mpeg2FramerateConversionAlgorithm_Values returns all elements of the Mpeg2FramerateConversionAlgorithm enum
27466func Mpeg2FramerateConversionAlgorithm_Values() []string {
27467	return []string{
27468		Mpeg2FramerateConversionAlgorithmDuplicateDrop,
27469		Mpeg2FramerateConversionAlgorithmInterpolate,
27470		Mpeg2FramerateConversionAlgorithmFrameformer,
27471	}
27472}
27473
27474// Indicates if the GOP Size in MPEG2 is specified in frames or seconds. If
27475// seconds, the system will convert the GOP Size into a frame count at run time.
27476const (
27477	// Mpeg2GopSizeUnitsFrames is a Mpeg2GopSizeUnits enum value
27478	Mpeg2GopSizeUnitsFrames = "FRAMES"
27479
27480	// Mpeg2GopSizeUnitsSeconds is a Mpeg2GopSizeUnits enum value
27481	Mpeg2GopSizeUnitsSeconds = "SECONDS"
27482)
27483
27484// Mpeg2GopSizeUnits_Values returns all elements of the Mpeg2GopSizeUnits enum
27485func Mpeg2GopSizeUnits_Values() []string {
27486	return []string{
27487		Mpeg2GopSizeUnitsFrames,
27488		Mpeg2GopSizeUnitsSeconds,
27489	}
27490}
27491
27492// Choose the scan line type for the output. Keep the default value, Progressive
27493// (PROGRESSIVE) to create a progressive output, regardless of the scan type
27494// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
27495// to create an output that's interlaced with the same field polarity throughout.
27496// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
27497// to produce outputs with the same field polarity as the source. For jobs that
27498// have multiple inputs, the output field polarity might change over the course
27499// of the output. Follow behavior depends on the input scan type. If the source
27500// is interlaced, the output will be interlaced with the same polarity as the
27501// source. If the source is progressive, the output will be interlaced with
27502// top field first or bottom field first, depending on which of the Follow
27503// options you choose.
27504const (
27505	// Mpeg2InterlaceModeProgressive is a Mpeg2InterlaceMode enum value
27506	Mpeg2InterlaceModeProgressive = "PROGRESSIVE"
27507
27508	// Mpeg2InterlaceModeTopField is a Mpeg2InterlaceMode enum value
27509	Mpeg2InterlaceModeTopField = "TOP_FIELD"
27510
27511	// Mpeg2InterlaceModeBottomField is a Mpeg2InterlaceMode enum value
27512	Mpeg2InterlaceModeBottomField = "BOTTOM_FIELD"
27513
27514	// Mpeg2InterlaceModeFollowTopField is a Mpeg2InterlaceMode enum value
27515	Mpeg2InterlaceModeFollowTopField = "FOLLOW_TOP_FIELD"
27516
27517	// Mpeg2InterlaceModeFollowBottomField is a Mpeg2InterlaceMode enum value
27518	Mpeg2InterlaceModeFollowBottomField = "FOLLOW_BOTTOM_FIELD"
27519)
27520
27521// Mpeg2InterlaceMode_Values returns all elements of the Mpeg2InterlaceMode enum
27522func Mpeg2InterlaceMode_Values() []string {
27523	return []string{
27524		Mpeg2InterlaceModeProgressive,
27525		Mpeg2InterlaceModeTopField,
27526		Mpeg2InterlaceModeBottomField,
27527		Mpeg2InterlaceModeFollowTopField,
27528		Mpeg2InterlaceModeFollowBottomField,
27529	}
27530}
27531
27532// Use Intra DC precision (Mpeg2IntraDcPrecision) to set quantization precision
27533// for intra-block DC coefficients. If you choose the value auto, the service
27534// will automatically select the precision based on the per-frame compression
27535// ratio.
27536const (
27537	// Mpeg2IntraDcPrecisionAuto is a Mpeg2IntraDcPrecision enum value
27538	Mpeg2IntraDcPrecisionAuto = "AUTO"
27539
27540	// Mpeg2IntraDcPrecisionIntraDcPrecision8 is a Mpeg2IntraDcPrecision enum value
27541	Mpeg2IntraDcPrecisionIntraDcPrecision8 = "INTRA_DC_PRECISION_8"
27542
27543	// Mpeg2IntraDcPrecisionIntraDcPrecision9 is a Mpeg2IntraDcPrecision enum value
27544	Mpeg2IntraDcPrecisionIntraDcPrecision9 = "INTRA_DC_PRECISION_9"
27545
27546	// Mpeg2IntraDcPrecisionIntraDcPrecision10 is a Mpeg2IntraDcPrecision enum value
27547	Mpeg2IntraDcPrecisionIntraDcPrecision10 = "INTRA_DC_PRECISION_10"
27548
27549	// Mpeg2IntraDcPrecisionIntraDcPrecision11 is a Mpeg2IntraDcPrecision enum value
27550	Mpeg2IntraDcPrecisionIntraDcPrecision11 = "INTRA_DC_PRECISION_11"
27551)
27552
27553// Mpeg2IntraDcPrecision_Values returns all elements of the Mpeg2IntraDcPrecision enum
27554func Mpeg2IntraDcPrecision_Values() []string {
27555	return []string{
27556		Mpeg2IntraDcPrecisionAuto,
27557		Mpeg2IntraDcPrecisionIntraDcPrecision8,
27558		Mpeg2IntraDcPrecisionIntraDcPrecision9,
27559		Mpeg2IntraDcPrecisionIntraDcPrecision10,
27560		Mpeg2IntraDcPrecisionIntraDcPrecision11,
27561	}
27562}
27563
27564// Optional. Specify how the service determines the pixel aspect ratio (PAR)
27565// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
27566// uses the PAR from your input video for your output. To specify a different
27567// PAR in the console, choose any value other than Follow source. To specify
27568// a different PAR by editing the JSON job specification, choose SPECIFIED.
27569// When you choose SPECIFIED for this setting, you must also specify values
27570// for the parNumerator and parDenominator settings.
27571const (
27572	// Mpeg2ParControlInitializeFromSource is a Mpeg2ParControl enum value
27573	Mpeg2ParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
27574
27575	// Mpeg2ParControlSpecified is a Mpeg2ParControl enum value
27576	Mpeg2ParControlSpecified = "SPECIFIED"
27577)
27578
27579// Mpeg2ParControl_Values returns all elements of the Mpeg2ParControl enum
27580func Mpeg2ParControl_Values() []string {
27581	return []string{
27582		Mpeg2ParControlInitializeFromSource,
27583		Mpeg2ParControlSpecified,
27584	}
27585}
27586
27587// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
27588// want to trade off encoding speed for output video quality. The default behavior
27589// is faster, lower quality, single-pass encoding.
27590const (
27591	// Mpeg2QualityTuningLevelSinglePass is a Mpeg2QualityTuningLevel enum value
27592	Mpeg2QualityTuningLevelSinglePass = "SINGLE_PASS"
27593
27594	// Mpeg2QualityTuningLevelMultiPass is a Mpeg2QualityTuningLevel enum value
27595	Mpeg2QualityTuningLevelMultiPass = "MULTI_PASS"
27596)
27597
27598// Mpeg2QualityTuningLevel_Values returns all elements of the Mpeg2QualityTuningLevel enum
27599func Mpeg2QualityTuningLevel_Values() []string {
27600	return []string{
27601		Mpeg2QualityTuningLevelSinglePass,
27602		Mpeg2QualityTuningLevelMultiPass,
27603	}
27604}
27605
27606// Use Rate control mode (Mpeg2RateControlMode) to specify whether the bitrate
27607// is variable (vbr) or constant (cbr).
27608const (
27609	// Mpeg2RateControlModeVbr is a Mpeg2RateControlMode enum value
27610	Mpeg2RateControlModeVbr = "VBR"
27611
27612	// Mpeg2RateControlModeCbr is a Mpeg2RateControlMode enum value
27613	Mpeg2RateControlModeCbr = "CBR"
27614)
27615
27616// Mpeg2RateControlMode_Values returns all elements of the Mpeg2RateControlMode enum
27617func Mpeg2RateControlMode_Values() []string {
27618	return []string{
27619		Mpeg2RateControlModeVbr,
27620		Mpeg2RateControlModeCbr,
27621	}
27622}
27623
27624// Use this setting for interlaced outputs, when your output frame rate is half
27625// of your input frame rate. In this situation, choose Optimized interlacing
27626// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
27627// case, each progressive frame from the input corresponds to an interlaced
27628// field in the output. Keep the default value, Basic interlacing (INTERLACED),
27629// for all other output frame rates. With basic interlacing, MediaConvert performs
27630// any frame rate conversion first and then interlaces the frames. When you
27631// choose Optimized interlacing and you set your output frame rate to a value
27632// that isn't suitable for optimized interlacing, MediaConvert automatically
27633// falls back to basic interlacing. Required settings: To use optimized interlacing,
27634// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
27635// use optimized interlacing for hard telecine outputs. You must also set Interlace
27636// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
27637const (
27638	// Mpeg2ScanTypeConversionModeInterlaced is a Mpeg2ScanTypeConversionMode enum value
27639	Mpeg2ScanTypeConversionModeInterlaced = "INTERLACED"
27640
27641	// Mpeg2ScanTypeConversionModeInterlacedOptimize is a Mpeg2ScanTypeConversionMode enum value
27642	Mpeg2ScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
27643)
27644
27645// Mpeg2ScanTypeConversionMode_Values returns all elements of the Mpeg2ScanTypeConversionMode enum
27646func Mpeg2ScanTypeConversionMode_Values() []string {
27647	return []string{
27648		Mpeg2ScanTypeConversionModeInterlaced,
27649		Mpeg2ScanTypeConversionModeInterlacedOptimize,
27650	}
27651}
27652
27653// Enable this setting to insert I-frames at scene changes that the service
27654// automatically detects. This improves video quality and is enabled by default.
27655const (
27656	// Mpeg2SceneChangeDetectDisabled is a Mpeg2SceneChangeDetect enum value
27657	Mpeg2SceneChangeDetectDisabled = "DISABLED"
27658
27659	// Mpeg2SceneChangeDetectEnabled is a Mpeg2SceneChangeDetect enum value
27660	Mpeg2SceneChangeDetectEnabled = "ENABLED"
27661)
27662
27663// Mpeg2SceneChangeDetect_Values returns all elements of the Mpeg2SceneChangeDetect enum
27664func Mpeg2SceneChangeDetect_Values() []string {
27665	return []string{
27666		Mpeg2SceneChangeDetectDisabled,
27667		Mpeg2SceneChangeDetectEnabled,
27668	}
27669}
27670
27671// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
27672// second (fps). Enable slow PAL to create a 25 fps output. When you enable
27673// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
27674// your audio to keep it synchronized with the video. Note that enabling this
27675// setting will slightly reduce the duration of your video. Required settings:
27676// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
27677// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
27678// 1.
27679const (
27680	// Mpeg2SlowPalDisabled is a Mpeg2SlowPal enum value
27681	Mpeg2SlowPalDisabled = "DISABLED"
27682
27683	// Mpeg2SlowPalEnabled is a Mpeg2SlowPal enum value
27684	Mpeg2SlowPalEnabled = "ENABLED"
27685)
27686
27687// Mpeg2SlowPal_Values returns all elements of the Mpeg2SlowPal enum
27688func Mpeg2SlowPal_Values() []string {
27689	return []string{
27690		Mpeg2SlowPalDisabled,
27691		Mpeg2SlowPalEnabled,
27692	}
27693}
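
// The "Required settings" note above corresponds to the following combination
// in a JSON job. A minimal sketch, using the same assumed Mpeg2Settings fields
// as the frame-rate example earlier in this file:
//
//    mpeg2 := &mediaconvert.Mpeg2Settings{
//        SlowPal:              aws.String(mediaconvert.Mpeg2SlowPalEnabled),
//        FramerateControl:     aws.String(mediaconvert.Mpeg2FramerateControlSpecified),
//        FramerateNumerator:   aws.Int64(25),
//        FramerateDenominator: aws.Int64(1),
//    }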
27694
27695// Keep the default value, Enabled (ENABLED), to adjust quantization within
27696// each frame based on spatial variation of content complexity. When you enable
27697// this feature, the encoder uses fewer bits on areas that can sustain more
27698// distortion with no noticeable visual degradation and uses more bits on areas
27699// where any small distortion will be noticeable. For example, complex textured
27700// blocks are encoded with fewer bits and smooth textured blocks are encoded
27701// with more bits. Enabling this feature will almost always improve your video
27702// quality. Note, though, that this feature doesn't take into account where
27703// the viewer's attention is likely to be. If viewers are likely to be focusing
27704// their attention on a part of the screen with a lot of complex texture, you
27705// might choose to disable this feature. Related setting: When you enable spatial
27706// adaptive quantization, set the value for Adaptive quantization (adaptiveQuantization)
27707// depending on your content. For homogeneous content, such as cartoons and
27708// video games, set it to Low. For content with a wider variety of textures,
27709// set it to High or Higher.
27710const (
27711	// Mpeg2SpatialAdaptiveQuantizationDisabled is a Mpeg2SpatialAdaptiveQuantization enum value
27712	Mpeg2SpatialAdaptiveQuantizationDisabled = "DISABLED"
27713
27714	// Mpeg2SpatialAdaptiveQuantizationEnabled is a Mpeg2SpatialAdaptiveQuantization enum value
27715	Mpeg2SpatialAdaptiveQuantizationEnabled = "ENABLED"
27716)
27717
27718// Mpeg2SpatialAdaptiveQuantization_Values returns all elements of the Mpeg2SpatialAdaptiveQuantization enum
27719func Mpeg2SpatialAdaptiveQuantization_Values() []string {
27720	return []string{
27721		Mpeg2SpatialAdaptiveQuantizationDisabled,
27722		Mpeg2SpatialAdaptiveQuantizationEnabled,
27723	}
27724}
27725
27726// Specify whether this output's video uses the D10 syntax. Keep the default
27727// value to not use the syntax. Related settings: When you choose D10 (D_10)
27728// for your MXF profile (profile), you must also set this value to D10 (D_10).
27729const (
27730	// Mpeg2SyntaxDefault is a Mpeg2Syntax enum value
27731	Mpeg2SyntaxDefault = "DEFAULT"
27732
27733	// Mpeg2SyntaxD10 is a Mpeg2Syntax enum value
27734	Mpeg2SyntaxD10 = "D_10"
27735)
27736
27737// Mpeg2Syntax_Values returns all elements of the Mpeg2Syntax enum
27738func Mpeg2Syntax_Values() []string {
27739	return []string{
27740		Mpeg2SyntaxDefault,
27741		Mpeg2SyntaxD10,
27742	}
27743}
27744
27745// When you do frame rate conversion from 23.976 frames per second (fps) to
27746// 29.97 fps, and your output scan type is interlaced, you can optionally enable
27747// hard or soft telecine to create a smoother picture. Hard telecine (HARD)
27748// produces a 29.97i output. Soft telecine (SOFT) produces a 23.976 output that
27749// signals to the video player device to do the conversion during playback.
27750// When you keep the default value, None (NONE), MediaConvert
27751// does a standard frame rate conversion to 29.97 without doing anything with
27752// the field polarity to create a smoother picture.
27753const (
27754	// Mpeg2TelecineNone is a Mpeg2Telecine enum value
27755	Mpeg2TelecineNone = "NONE"
27756
27757	// Mpeg2TelecineSoft is a Mpeg2Telecine enum value
27758	Mpeg2TelecineSoft = "SOFT"
27759
27760	// Mpeg2TelecineHard is a Mpeg2Telecine enum value
27761	Mpeg2TelecineHard = "HARD"
27762)
27763
27764// Mpeg2Telecine_Values returns all elements of the Mpeg2Telecine enum
27765func Mpeg2Telecine_Values() []string {
27766	return []string{
27767		Mpeg2TelecineNone,
27768		Mpeg2TelecineSoft,
27769		Mpeg2TelecineHard,
27770	}
27771}
27772
27773// Keep the default value, Enabled (ENABLED), to adjust quantization within
27774// each frame based on temporal variation of content complexity. When you enable
27775// this feature, the encoder uses fewer bits on areas of the frame that aren't
27776// moving and uses more bits on complex objects with sharp edges that move a
27777// lot. For example, this feature improves the readability of text tickers on
27778// newscasts and scoreboards on sports matches. Enabling this feature will almost
27779// always improve your video quality. Note, though, that this feature doesn't
27780// take into account where the viewer's attention is likely to be. If viewers
27781// are likely to be focusing their attention on a part of the screen that doesn't
27782// have moving objects with sharp edges, such as sports athletes' faces, you
27783// might choose to disable this feature. Related setting: When you enable temporal
27784// quantization, adjust the strength of the filter with the setting Adaptive
27785// quantization (adaptiveQuantization).
27786const (
27787	// Mpeg2TemporalAdaptiveQuantizationDisabled is a Mpeg2TemporalAdaptiveQuantization enum value
27788	Mpeg2TemporalAdaptiveQuantizationDisabled = "DISABLED"
27789
27790	// Mpeg2TemporalAdaptiveQuantizationEnabled is a Mpeg2TemporalAdaptiveQuantization enum value
27791	Mpeg2TemporalAdaptiveQuantizationEnabled = "ENABLED"
27792)
27793
27794// Mpeg2TemporalAdaptiveQuantization_Values returns all elements of the Mpeg2TemporalAdaptiveQuantization enum
27795func Mpeg2TemporalAdaptiveQuantization_Values() []string {
27796	return []string{
27797		Mpeg2TemporalAdaptiveQuantizationDisabled,
27798		Mpeg2TemporalAdaptiveQuantizationEnabled,
27799	}
27800}
27801
27802// COMBINE_DUPLICATE_STREAMS combines identical audio encoding settings across
27803// a Microsoft Smooth output group into a single audio stream.
27804const (
27805	// MsSmoothAudioDeduplicationCombineDuplicateStreams is a MsSmoothAudioDeduplication enum value
27806	MsSmoothAudioDeduplicationCombineDuplicateStreams = "COMBINE_DUPLICATE_STREAMS"
27807
27808	// MsSmoothAudioDeduplicationNone is a MsSmoothAudioDeduplication enum value
27809	MsSmoothAudioDeduplicationNone = "NONE"
27810)
27811
27812// MsSmoothAudioDeduplication_Values returns all elements of the MsSmoothAudioDeduplication enum
27813func MsSmoothAudioDeduplication_Values() []string {
27814	return []string{
27815		MsSmoothAudioDeduplicationCombineDuplicateStreams,
27816		MsSmoothAudioDeduplicationNone,
27817	}
27818}
27819
27820// Use Manifest encoding (MsSmoothManifestEncoding) to specify the encoding
27821// format for the server and client manifest. Valid options are utf8 and utf16.
27822const (
27823	// MsSmoothManifestEncodingUtf8 is a MsSmoothManifestEncoding enum value
27824	MsSmoothManifestEncodingUtf8 = "UTF8"
27825
27826	// MsSmoothManifestEncodingUtf16 is a MsSmoothManifestEncoding enum value
27827	MsSmoothManifestEncodingUtf16 = "UTF16"
27828)
27829
27830// MsSmoothManifestEncoding_Values returns all elements of the MsSmoothManifestEncoding enum
27831func MsSmoothManifestEncoding_Values() []string {
27832	return []string{
27833		MsSmoothManifestEncodingUtf8,
27834		MsSmoothManifestEncodingUtf16,
27835	}
27836}
27837
27838// Optional. When you have AFD signaling set up in your output video stream,
27839// use this setting to choose whether to also include it in the MXF wrapper.
27840// Choose Don't copy (NO_COPY) to exclude AFD signaling from the MXF wrapper.
27841// Choose Copy from video stream (COPY_FROM_VIDEO) to copy the AFD values from
27842// the video stream for this output to the MXF wrapper. Regardless of which
27843// option you choose, the AFD values remain in the video stream. Related settings:
27844// To set up your output to include or exclude AFD values, see AfdSignaling,
27845// under VideoDescription. On the console, find AFD signaling under the output's
27846// video encoding settings.
27847const (
27848	// MxfAfdSignalingNoCopy is a MxfAfdSignaling enum value
27849	MxfAfdSignalingNoCopy = "NO_COPY"
27850
27851	// MxfAfdSignalingCopyFromVideo is a MxfAfdSignaling enum value
27852	MxfAfdSignalingCopyFromVideo = "COPY_FROM_VIDEO"
27853)
27854
27855// MxfAfdSignaling_Values returns all elements of the MxfAfdSignaling enum
27856func MxfAfdSignaling_Values() []string {
27857	return []string{
27858		MxfAfdSignalingNoCopy,
27859		MxfAfdSignalingCopyFromVideo,
27860	}
27861}
27862
27863// Specify the MXF profile, also called shim, for this output. When you choose
27864// Auto, MediaConvert chooses a profile based on the video codec and resolution.
27865// For a list of codecs supported with each MXF profile, see https://docs.aws.amazon.com/mediaconvert/latest/ug/codecs-supported-with-each-mxf-profile.html.
27866// For more information about the automatic selection behavior, see https://docs.aws.amazon.com/mediaconvert/latest/ug/default-automatic-selection-of-mxf-profiles.html.
27867const (
27868	// MxfProfileD10 is a MxfProfile enum value
27869	MxfProfileD10 = "D_10"
27870
27871	// MxfProfileXdcam is a MxfProfile enum value
27872	MxfProfileXdcam = "XDCAM"
27873
27874	// MxfProfileOp1a is a MxfProfile enum value
27875	MxfProfileOp1a = "OP1A"
27876)
27877
27878// MxfProfile_Values returns all elements of the MxfProfile enum
27879func MxfProfile_Values() []string {
27880	return []string{
27881		MxfProfileD10,
27882		MxfProfileXdcam,
27883		MxfProfileOp1a,
27884	}
27885}
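
// As noted for Mpeg2Syntax earlier in this file, choosing the D-10 MXF profile
// also requires D_10 syntax on the MPEG-2 codec settings. A hedged sketch of
// the pair; the MxfSettings and Mpeg2Settings field names are assumed:
//
//    mxf := &mediaconvert.MxfSettings{
//        Profile: aws.String(mediaconvert.MxfProfileD10),
//    }
//    mpeg2 := &mediaconvert.Mpeg2Settings{
//        Syntax: aws.String(mediaconvert.Mpeg2SyntaxD10),
//    }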
27886
27887// Choose the type of Nielsen watermarks that you want in your outputs. When
27888// you choose NAES 2 and NW (NAES2_AND_NW), you must provide a value for the
27889// setting SID (sourceId). When you choose CBET (CBET), you must provide a value
27890// for the setting CSID (cbetSourceId). When you choose NAES 2, NW, and CBET
27891// (NAES2_AND_NW_AND_CBET), you must provide values for both of these settings.
27892const (
27893	// NielsenActiveWatermarkProcessTypeNaes2AndNw is a NielsenActiveWatermarkProcessType enum value
27894	NielsenActiveWatermarkProcessTypeNaes2AndNw = "NAES2_AND_NW"
27895
27896	// NielsenActiveWatermarkProcessTypeCbet is a NielsenActiveWatermarkProcessType enum value
27897	NielsenActiveWatermarkProcessTypeCbet = "CBET"
27898
27899	// NielsenActiveWatermarkProcessTypeNaes2AndNwAndCbet is a NielsenActiveWatermarkProcessType enum value
27900	NielsenActiveWatermarkProcessTypeNaes2AndNwAndCbet = "NAES2_AND_NW_AND_CBET"
27901)
27902
27903// NielsenActiveWatermarkProcessType_Values returns all elements of the NielsenActiveWatermarkProcessType enum
27904func NielsenActiveWatermarkProcessType_Values() []string {
27905	return []string{
27906		NielsenActiveWatermarkProcessTypeNaes2AndNw,
27907		NielsenActiveWatermarkProcessTypeCbet,
27908		NielsenActiveWatermarkProcessTypeNaes2AndNwAndCbet,
27909	}
27910}
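
// The watermark type you choose above determines which Nielsen identifiers are
// required. A minimal sketch for NAES 2 and NW; the
// NielsenNonLinearWatermarkSettings field names are assumed and the source ID
// value is a hypothetical placeholder:
//
//    nielsen := &mediaconvert.NielsenNonLinearWatermarkSettings{
//        ActiveWatermarkProcess: aws.String(mediaconvert.NielsenActiveWatermarkProcessTypeNaes2AndNw),
//        SourceId:               aws.Int64(1234), // hypothetical SID
//    }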
27911
27912// Required. Specify whether your source content already contains Nielsen non-linear
27913// watermarks. When you set this value to Watermarked (WATERMARKED), the service
27914// fails the job. Nielsen requires that you add non-linear watermarking to only
27915// clean content that doesn't already have non-linear Nielsen watermarks.
27916const (
27917	// NielsenSourceWatermarkStatusTypeClean is a NielsenSourceWatermarkStatusType enum value
27918	NielsenSourceWatermarkStatusTypeClean = "CLEAN"
27919
27920	// NielsenSourceWatermarkStatusTypeWatermarked is a NielsenSourceWatermarkStatusType enum value
27921	NielsenSourceWatermarkStatusTypeWatermarked = "WATERMARKED"
27922)
27923
27924// NielsenSourceWatermarkStatusType_Values returns all elements of the NielsenSourceWatermarkStatusType enum
27925func NielsenSourceWatermarkStatusType_Values() []string {
27926	return []string{
27927		NielsenSourceWatermarkStatusTypeClean,
27928		NielsenSourceWatermarkStatusTypeWatermarked,
27929	}
27930}
27931
27932// To create assets that have the same TIC values in each audio track, keep
27933// the default value Share TICs (SAME_TICS_PER_TRACK). To create assets that
27934// have unique TIC values for each audio track, choose Use unique TICs (RESERVE_UNIQUE_TICS_PER_TRACK).
27935const (
27936	// NielsenUniqueTicPerAudioTrackTypeReserveUniqueTicsPerTrack is a NielsenUniqueTicPerAudioTrackType enum value
27937	NielsenUniqueTicPerAudioTrackTypeReserveUniqueTicsPerTrack = "RESERVE_UNIQUE_TICS_PER_TRACK"
27938
27939	// NielsenUniqueTicPerAudioTrackTypeSameTicsPerTrack is a NielsenUniqueTicPerAudioTrackType enum value
27940	NielsenUniqueTicPerAudioTrackTypeSameTicsPerTrack = "SAME_TICS_PER_TRACK"
27941)
27942
27943// NielsenUniqueTicPerAudioTrackType_Values returns all elements of the NielsenUniqueTicPerAudioTrackType enum
27944func NielsenUniqueTicPerAudioTrackType_Values() []string {
27945	return []string{
27946		NielsenUniqueTicPerAudioTrackTypeReserveUniqueTicsPerTrack,
27947		NielsenUniqueTicPerAudioTrackTypeSameTicsPerTrack,
27948	}
27949}
27950
27951// Optional. When you set Noise reducer (noiseReducer) to Temporal (TEMPORAL),
27952// you can use this setting to apply sharpening. The default behavior, Auto
27953// (AUTO), allows the transcoder to determine whether to apply filtering, depending
27954// on input type and quality. When you set Noise reducer to Temporal, your output
27955// bandwidth is reduced. When Post temporal sharpening is also enabled, that
27956// bandwidth reduction is smaller.
27957const (
27958	// NoiseFilterPostTemporalSharpeningDisabled is a NoiseFilterPostTemporalSharpening enum value
27959	NoiseFilterPostTemporalSharpeningDisabled = "DISABLED"
27960
27961	// NoiseFilterPostTemporalSharpeningEnabled is a NoiseFilterPostTemporalSharpening enum value
27962	NoiseFilterPostTemporalSharpeningEnabled = "ENABLED"
27963
27964	// NoiseFilterPostTemporalSharpeningAuto is a NoiseFilterPostTemporalSharpening enum value
27965	NoiseFilterPostTemporalSharpeningAuto = "AUTO"
27966)
27967
27968// NoiseFilterPostTemporalSharpening_Values returns all elements of the NoiseFilterPostTemporalSharpening enum
27969func NoiseFilterPostTemporalSharpening_Values() []string {
27970	return []string{
27971		NoiseFilterPostTemporalSharpeningDisabled,
27972		NoiseFilterPostTemporalSharpeningEnabled,
27973		NoiseFilterPostTemporalSharpeningAuto,
27974	}
27975}
27976
27977// Use Noise reducer filter (NoiseReducerFilter) to select one of the following
27978// spatial image filtering functions. To use this setting, you must also enable
27979// Noise reducer (NoiseReducer). * Bilateral preserves edges while reducing
27980// noise. * Mean (softest), Gaussian, Lanczos, and Sharpen (sharpest) do convolution
27981// filtering. * Conserve does min/max noise reduction. * Spatial does frequency-domain
27982// filtering based on JND principles. * Temporal optimizes video quality for
27983// complex motion.
27984const (
27985	// NoiseReducerFilterBilateral is a NoiseReducerFilter enum value
27986	NoiseReducerFilterBilateral = "BILATERAL"
27987
27988	// NoiseReducerFilterMean is a NoiseReducerFilter enum value
27989	NoiseReducerFilterMean = "MEAN"
27990
27991	// NoiseReducerFilterGaussian is a NoiseReducerFilter enum value
27992	NoiseReducerFilterGaussian = "GAUSSIAN"
27993
27994	// NoiseReducerFilterLanczos is a NoiseReducerFilter enum value
27995	NoiseReducerFilterLanczos = "LANCZOS"
27996
27997	// NoiseReducerFilterSharpen is a NoiseReducerFilter enum value
27998	NoiseReducerFilterSharpen = "SHARPEN"
27999
28000	// NoiseReducerFilterConserve is a NoiseReducerFilter enum value
28001	NoiseReducerFilterConserve = "CONSERVE"
28002
28003	// NoiseReducerFilterSpatial is a NoiseReducerFilter enum value
28004	NoiseReducerFilterSpatial = "SPATIAL"
28005
28006	// NoiseReducerFilterTemporal is a NoiseReducerFilter enum value
28007	NoiseReducerFilterTemporal = "TEMPORAL"
28008)
28009
28010// NoiseReducerFilter_Values returns all elements of the NoiseReducerFilter enum
28011func NoiseReducerFilter_Values() []string {
28012	return []string{
28013		NoiseReducerFilterBilateral,
28014		NoiseReducerFilterMean,
28015		NoiseReducerFilterGaussian,
28016		NoiseReducerFilterLanczos,
28017		NoiseReducerFilterSharpen,
28018		NoiseReducerFilterConserve,
28019		NoiseReducerFilterSpatial,
28020		NoiseReducerFilterTemporal,
28021	}
28022}
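
// Example (illustrative sketch, not generated): choosing a spatial filter on a
// noise reducer. The NoiseReducer struct and its Filter field are assumptions
// based on the job settings shapes defined elsewhere in this package; verify
// the field names against the struct definitions before relying on them.
//
//    nr := &NoiseReducer{
//        Filter: aws.String(NoiseReducerFilterBilateral), // preserve edges while reducing noise
//    }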
28023
28024// Optional. When you request lists of resources, you can specify whether they
28025// are sorted in ASCENDING or DESCENDING order. Default varies by resource.
28026const (
28027	// OrderAscending is a Order enum value
28028	OrderAscending = "ASCENDING"
28029
28030	// OrderDescending is a Order enum value
28031	OrderDescending = "DESCENDING"
28032)
28033
28034// Order_Values returns all elements of the Order enum
28035func Order_Values() []string {
28036	return []string{
28037		OrderAscending,
28038		OrderDescending,
28039	}
28040}
28041
28042// Type of output group (File group, Apple HLS, DASH ISO, Microsoft Smooth Streaming,
28043// CMAF)
28044const (
28045	// OutputGroupTypeHlsGroupSettings is a OutputGroupType enum value
28046	OutputGroupTypeHlsGroupSettings = "HLS_GROUP_SETTINGS"
28047
28048	// OutputGroupTypeDashIsoGroupSettings is a OutputGroupType enum value
28049	OutputGroupTypeDashIsoGroupSettings = "DASH_ISO_GROUP_SETTINGS"
28050
28051	// OutputGroupTypeFileGroupSettings is a OutputGroupType enum value
28052	OutputGroupTypeFileGroupSettings = "FILE_GROUP_SETTINGS"
28053
28054	// OutputGroupTypeMsSmoothGroupSettings is a OutputGroupType enum value
28055	OutputGroupTypeMsSmoothGroupSettings = "MS_SMOOTH_GROUP_SETTINGS"
28056
28057	// OutputGroupTypeCmafGroupSettings is a OutputGroupType enum value
28058	OutputGroupTypeCmafGroupSettings = "CMAF_GROUP_SETTINGS"
28059)
28060
28061// OutputGroupType_Values returns all elements of the OutputGroupType enum
28062func OutputGroupType_Values() []string {
28063	return []string{
28064		OutputGroupTypeHlsGroupSettings,
28065		OutputGroupTypeDashIsoGroupSettings,
28066		OutputGroupTypeFileGroupSettings,
28067		OutputGroupTypeMsSmoothGroupSettings,
28068		OutputGroupTypeCmafGroupSettings,
28069	}
28070}
28071
28072// Selects the method of inserting SDT information into the output stream. "Follow
28073// input SDT" copies SDT information from the input stream to the output stream.
28074// "Follow input SDT if present" copies SDT information from the input stream to the
28075// output stream if SDT information is present in the input; otherwise it falls back
28076// on the user-defined values. "SDT Manually" means that you enter the SDT information
28077// yourself. "No SDT" means that the output stream will not contain SDT information.
28078const (
28079	// OutputSdtSdtFollow is a OutputSdt enum value
28080	OutputSdtSdtFollow = "SDT_FOLLOW"
28081
28082	// OutputSdtSdtFollowIfPresent is a OutputSdt enum value
28083	OutputSdtSdtFollowIfPresent = "SDT_FOLLOW_IF_PRESENT"
28084
28085	// OutputSdtSdtManual is a OutputSdt enum value
28086	OutputSdtSdtManual = "SDT_MANUAL"
28087
28088	// OutputSdtSdtNone is a OutputSdt enum value
28089	OutputSdtSdtNone = "SDT_NONE"
28090)
28091
28092// OutputSdt_Values returns all elements of the OutputSdt enum
28093func OutputSdt_Values() []string {
28094	return []string{
28095		OutputSdtSdtFollow,
28096		OutputSdtSdtFollowIfPresent,
28097		OutputSdtSdtManual,
28098		OutputSdtSdtNone,
28099	}
28100}
28101
28102// Optional. When you request a list of presets, you can choose to list them
28103// alphabetically by NAME or chronologically by CREATION_DATE. If you don't
28104// specify, the service will list them by name.
28105const (
28106	// PresetListByName is a PresetListBy enum value
28107	PresetListByName = "NAME"
28108
28109	// PresetListByCreationDate is a PresetListBy enum value
28110	PresetListByCreationDate = "CREATION_DATE"
28111
28112	// PresetListBySystem is a PresetListBy enum value
28113	PresetListBySystem = "SYSTEM"
28114)
28115
28116// PresetListBy_Values returns all elements of the PresetListBy enum
28117func PresetListBy_Values() []string {
28118	return []string{
28119		PresetListByName,
28120		PresetListByCreationDate,
28121		PresetListBySystem,
28122	}
28123}
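
// Example (illustrative sketch, not generated): listing presets by creation
// date in descending order, assuming svc is a *MediaConvert client. The
// ListPresetsInput fields ListBy, Order, and MaxResults are assumptions based
// on this package's List operations; check the input shape before use.
//
//    out, err := svc.ListPresets(&ListPresetsInput{
//        ListBy:     aws.String(PresetListByCreationDate),
//        Order:      aws.String(OrderDescending),
//        MaxResults: aws.Int64(20),
//    })
//    if err == nil {
//        fmt.Println(out)
//    }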
28124
28125// Specifies whether the pricing plan for the queue is on-demand or reserved.
28126// For on-demand, you pay per minute, billed in increments of .01 minute. For
28127// reserved, you pay for the transcoding capacity of the entire queue, regardless
28128// of how much or how little you use it. Reserved pricing requires a 12-month
28129// commitment.
28130const (
28131	// PricingPlanOnDemand is a PricingPlan enum value
28132	PricingPlanOnDemand = "ON_DEMAND"
28133
28134	// PricingPlanReserved is a PricingPlan enum value
28135	PricingPlanReserved = "RESERVED"
28136)
28137
28138// PricingPlan_Values returns all elements of the PricingPlan enum
28139func PricingPlan_Values() []string {
28140	return []string{
28141		PricingPlanOnDemand,
28142		PricingPlanReserved,
28143	}
28144}
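
// Example (illustrative sketch, not generated): creating an on-demand queue,
// assuming svc is a *MediaConvert client and that CreateQueueInput exposes Name
// and PricingPlan fields as defined elsewhere in this package.
//
//    _, err := svc.CreateQueue(&CreateQueueInput{
//        Name:        aws.String("transcode-backlog"),
//        PricingPlan: aws.String(PricingPlanOnDemand), // reserved queues also need ReservationPlanSettings
//    })
//    if err != nil {
//        // handle the error
//    }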
28145
28146// Use Profile (ProResCodecProfile) to specify the type of Apple ProRes codec
28147// to use for this output.
28148const (
28149	// ProresCodecProfileAppleProres422 is a ProresCodecProfile enum value
28150	ProresCodecProfileAppleProres422 = "APPLE_PRORES_422"
28151
28152	// ProresCodecProfileAppleProres422Hq is a ProresCodecProfile enum value
28153	ProresCodecProfileAppleProres422Hq = "APPLE_PRORES_422_HQ"
28154
28155	// ProresCodecProfileAppleProres422Lt is a ProresCodecProfile enum value
28156	ProresCodecProfileAppleProres422Lt = "APPLE_PRORES_422_LT"
28157
28158	// ProresCodecProfileAppleProres422Proxy is a ProresCodecProfile enum value
28159	ProresCodecProfileAppleProres422Proxy = "APPLE_PRORES_422_PROXY"
28160)
28161
28162// ProresCodecProfile_Values returns all elements of the ProresCodecProfile enum
28163func ProresCodecProfile_Values() []string {
28164	return []string{
28165		ProresCodecProfileAppleProres422,
28166		ProresCodecProfileAppleProres422Hq,
28167		ProresCodecProfileAppleProres422Lt,
28168		ProresCodecProfileAppleProres422Proxy,
28169	}
28170}
28171
28172// If you are using the console, use the Framerate setting to specify the frame
28173// rate for this output. If you want to keep the same frame rate as the input
28174// video, choose Follow source. If you want to do frame rate conversion, choose
28175// a frame rate from the dropdown list or choose Custom. The framerates shown
28176// in the dropdown list are decimal approximations of fractions. If you choose
28177// Custom, specify your frame rate as a fraction. If you are creating your transcoding
28178// job specification as a JSON file without the console, use FramerateControl
28179// to specify which value the service uses for the frame rate for this output.
28180// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
28181// from the input. Choose SPECIFIED if you want the service to use the frame
28182// rate you specify in the settings FramerateNumerator and FramerateDenominator.
28183const (
28184	// ProresFramerateControlInitializeFromSource is a ProresFramerateControl enum value
28185	ProresFramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
28186
28187	// ProresFramerateControlSpecified is a ProresFramerateControl enum value
28188	ProresFramerateControlSpecified = "SPECIFIED"
28189)
28190
28191// ProresFramerateControl_Values returns all elements of the ProresFramerateControl enum
28192func ProresFramerateControl_Values() []string {
28193	return []string{
28194		ProresFramerateControlInitializeFromSource,
28195		ProresFramerateControlSpecified,
28196	}
28197}
28198
28199// Choose the method that you want MediaConvert to use when increasing or decreasing
28200// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
28201// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
28202// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
28203// smooth picture, but might introduce undesirable video artifacts. For complex
28204// frame rate conversions, especially if your source video has already been
28205// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
28206// motion-compensated interpolation. FrameFormer chooses the best conversion
28207// method frame by frame. Note that using FrameFormer increases the transcoding
28208// time and incurs a significant add-on cost.
28209const (
28210	// ProresFramerateConversionAlgorithmDuplicateDrop is a ProresFramerateConversionAlgorithm enum value
28211	ProresFramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
28212
28213	// ProresFramerateConversionAlgorithmInterpolate is a ProresFramerateConversionAlgorithm enum value
28214	ProresFramerateConversionAlgorithmInterpolate = "INTERPOLATE"
28215
28216	// ProresFramerateConversionAlgorithmFrameformer is a ProresFramerateConversionAlgorithm enum value
28217	ProresFramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
28218)
28219
28220// ProresFramerateConversionAlgorithm_Values returns all elements of the ProresFramerateConversionAlgorithm enum
28221func ProresFramerateConversionAlgorithm_Values() []string {
28222	return []string{
28223		ProresFramerateConversionAlgorithmDuplicateDrop,
28224		ProresFramerateConversionAlgorithmInterpolate,
28225		ProresFramerateConversionAlgorithmFrameformer,
28226	}
28227}
28228
28229// Choose the scan line type for the output. Keep the default value, Progressive
28230// (PROGRESSIVE), to create a progressive output, regardless of the scan type
28231// of your input. Use Top field first (TOP_FIELD) or Bottom field first (BOTTOM_FIELD)
28232// to create an output that's interlaced with the same field polarity throughout.
28233// Use Follow, default top (FOLLOW_TOP_FIELD) or Follow, default bottom (FOLLOW_BOTTOM_FIELD)
28234// to produce outputs with the same field polarity as the source. For jobs that
28235// have multiple inputs, the output field polarity might change over the course
28236// of the output. Follow behavior depends on the input scan type. If the source
28237// is interlaced, the output will be interlaced with the same polarity as the
28238// source. If the source is progressive, the output will be interlaced with
28239// top field or bottom field first, depending on which of the Follow options
28240// you choose.
28241const (
28242	// ProresInterlaceModeProgressive is a ProresInterlaceMode enum value
28243	ProresInterlaceModeProgressive = "PROGRESSIVE"
28244
28245	// ProresInterlaceModeTopField is a ProresInterlaceMode enum value
28246	ProresInterlaceModeTopField = "TOP_FIELD"
28247
28248	// ProresInterlaceModeBottomField is a ProresInterlaceMode enum value
28249	ProresInterlaceModeBottomField = "BOTTOM_FIELD"
28250
28251	// ProresInterlaceModeFollowTopField is a ProresInterlaceMode enum value
28252	ProresInterlaceModeFollowTopField = "FOLLOW_TOP_FIELD"
28253
28254	// ProresInterlaceModeFollowBottomField is a ProresInterlaceMode enum value
28255	ProresInterlaceModeFollowBottomField = "FOLLOW_BOTTOM_FIELD"
28256)
28257
28258// ProresInterlaceMode_Values returns all elements of the ProresInterlaceMode enum
28259func ProresInterlaceMode_Values() []string {
28260	return []string{
28261		ProresInterlaceModeProgressive,
28262		ProresInterlaceModeTopField,
28263		ProresInterlaceModeBottomField,
28264		ProresInterlaceModeFollowTopField,
28265		ProresInterlaceModeFollowBottomField,
28266	}
28267}
28268
28269// Optional. Specify how the service determines the pixel aspect ratio (PAR)
28270// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
28271// uses the PAR from your input video for your output. To specify a different
28272// PAR in the console, choose any value other than Follow source. To specify
28273// a different PAR by editing the JSON job specification, choose SPECIFIED.
28274// When you choose SPECIFIED for this setting, you must also specify values
28275// for the parNumerator and parDenominator settings.
28276const (
28277	// ProresParControlInitializeFromSource is a ProresParControl enum value
28278	ProresParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
28279
28280	// ProresParControlSpecified is a ProresParControl enum value
28281	ProresParControlSpecified = "SPECIFIED"
28282)
28283
28284// ProresParControl_Values returns all elements of the ProresParControl enum
28285func ProresParControl_Values() []string {
28286	return []string{
28287		ProresParControlInitializeFromSource,
28288		ProresParControlSpecified,
28289	}
28290}
28291
28292// Use this setting for interlaced outputs, when your output frame rate is half
28293// of your input frame rate. In this situation, choose Optimized interlacing
28294// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
28295// case, each progressive frame from the input corresponds to an interlaced
28296// field in the output. Keep the default value, Basic interlacing (INTERLACED),
28297// for all other output frame rates. With basic interlacing, MediaConvert performs
28298// any frame rate conversion first and then interlaces the frames. When you
28299// choose Optimized interlacing and you set your output frame rate to a value
28300// that isn't suitable for optimized interlacing, MediaConvert automatically
28301// falls back to basic interlacing. Required settings: To use optimized interlacing,
28302// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
28303// use optimized interlacing for hard telecine outputs. You must also set Interlace
28304// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
28305const (
28306	// ProresScanTypeConversionModeInterlaced is a ProresScanTypeConversionMode enum value
28307	ProresScanTypeConversionModeInterlaced = "INTERLACED"
28308
28309	// ProresScanTypeConversionModeInterlacedOptimize is a ProresScanTypeConversionMode enum value
28310	ProresScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
28311)
28312
28313// ProresScanTypeConversionMode_Values returns all elements of the ProresScanTypeConversionMode enum
28314func ProresScanTypeConversionMode_Values() []string {
28315	return []string{
28316		ProresScanTypeConversionModeInterlaced,
28317		ProresScanTypeConversionModeInterlacedOptimize,
28318	}
28319}
28320
28321// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
28322// second (fps). Enable slow PAL to create a 25 fps output. When you enable
28323// slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
28324// your audio to keep it synchronized with the video. Note that enabling this
28325// setting will slightly reduce the duration of your video. Required settings:
28326// You must also set Framerate to 25. In your JSON job specification, set (framerateControl)
28327// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
28328// 1.
28329const (
28330	// ProresSlowPalDisabled is a ProresSlowPal enum value
28331	ProresSlowPalDisabled = "DISABLED"
28332
28333	// ProresSlowPalEnabled is a ProresSlowPal enum value
28334	ProresSlowPalEnabled = "ENABLED"
28335)
28336
28337// ProresSlowPal_Values returns all elements of the ProresSlowPal enum
28338func ProresSlowPal_Values() []string {
28339	return []string{
28340		ProresSlowPalDisabled,
28341		ProresSlowPalEnabled,
28342	}
28343}
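
// Example (illustrative sketch, not generated): the required settings described
// above for slow PAL on a ProRes output. The ProresSettings struct and its
// framerate fields are assumptions based on the job settings shapes in this
// package; verify them against the struct definitions.
//
//    prores := &ProresSettings{
//        SlowPal:              aws.String(ProresSlowPalEnabled),
//        FramerateControl:     aws.String(ProresFramerateControlSpecified),
//        FramerateNumerator:   aws.Int64(25),
//        FramerateDenominator: aws.Int64(1),
//    }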
28344
28345// When you do frame rate conversion from 23.976 frames per second (fps) to
28346// 29.97 fps, and your output scan type is interlaced, you can optionally enable
28347// hard telecine (HARD) to create a smoother picture. When you keep the default
28348// value, None (NONE), MediaConvert does a standard frame rate conversion to
28349// 29.97 fps and doesn't adjust the field polarity to create that smoother
28350// picture.
28351const (
28352	// ProresTelecineNone is a ProresTelecine enum value
28353	ProresTelecineNone = "NONE"
28354
28355	// ProresTelecineHard is a ProresTelecine enum value
28356	ProresTelecineHard = "HARD"
28357)
28358
28359// ProresTelecine_Values returns all elements of the ProresTelecine enum
28360func ProresTelecine_Values() []string {
28361	return []string{
28362		ProresTelecineNone,
28363		ProresTelecineHard,
28364	}
28365}
28366
28367// Optional. When you request a list of queues, you can choose to list them
28368// alphabetically by NAME or chronologically by CREATION_DATE. If you don't
28369// specify, the service will list them by creation date.
28370const (
28371	// QueueListByName is a QueueListBy enum value
28372	QueueListByName = "NAME"
28373
28374	// QueueListByCreationDate is a QueueListBy enum value
28375	QueueListByCreationDate = "CREATION_DATE"
28376)
28377
28378// QueueListBy_Values returns all elements of the QueueListBy enum
28379func QueueListBy_Values() []string {
28380	return []string{
28381		QueueListByName,
28382		QueueListByCreationDate,
28383	}
28384}
28385
28386// Queues can be ACTIVE or PAUSED. If you pause a queue, jobs in that queue
28387// won't begin. Jobs that are running when you pause a queue continue to run
28388// until they finish or result in an error.
28389const (
28390	// QueueStatusActive is a QueueStatus enum value
28391	QueueStatusActive = "ACTIVE"
28392
28393	// QueueStatusPaused is a QueueStatus enum value
28394	QueueStatusPaused = "PAUSED"
28395)
28396
28397// QueueStatus_Values returns all elements of the QueueStatus enum
28398func QueueStatus_Values() []string {
28399	return []string{
28400		QueueStatusActive,
28401		QueueStatusPaused,
28402	}
28403}
28404
28405// Specifies whether the term of your reserved queue pricing plan is automatically
28406// extended (AUTO_RENEW) or expires (EXPIRE) at the end of the term.
28407const (
28408	// RenewalTypeAutoRenew is a RenewalType enum value
28409	RenewalTypeAutoRenew = "AUTO_RENEW"
28410
28411	// RenewalTypeExpire is a RenewalType enum value
28412	RenewalTypeExpire = "EXPIRE"
28413)
28414
28415// RenewalType_Values returns all elements of the RenewalType enum
28416func RenewalType_Values() []string {
28417	return []string{
28418		RenewalTypeAutoRenew,
28419		RenewalTypeExpire,
28420	}
28421}
28422
28423// Specifies whether the pricing plan for your reserved queue is ACTIVE or EXPIRED.
28424const (
28425	// ReservationPlanStatusActive is a ReservationPlanStatus enum value
28426	ReservationPlanStatusActive = "ACTIVE"
28427
28428	// ReservationPlanStatusExpired is a ReservationPlanStatus enum value
28429	ReservationPlanStatusExpired = "EXPIRED"
28430)
28431
28432// ReservationPlanStatus_Values returns all elements of the ReservationPlanStatus enum
28433func ReservationPlanStatus_Values() []string {
28434	return []string{
28435		ReservationPlanStatusActive,
28436		ReservationPlanStatusExpired,
28437	}
28438}
28439
28440// Use Respond to AFD (RespondToAfd) to specify how the service changes the
28441// video itself in response to AFD values in the input. * Choose Respond to
28442// clip the input video frame according to the AFD value, input display aspect
28443// ratio, and output display aspect ratio. * Choose Passthrough to include the
28444// input AFD values. Do not choose this when AfdSignaling is set to (NONE).
28445// A preferred implementation of this workflow is to set RespondToAfd to (NONE)
28446// and set AfdSignaling to (AUTO). * Choose None to remove all input AFD values
28447// from this output.
28448const (
28449	// RespondToAfdNone is a RespondToAfd enum value
28450	RespondToAfdNone = "NONE"
28451
28452	// RespondToAfdRespond is a RespondToAfd enum value
28453	RespondToAfdRespond = "RESPOND"
28454
28455	// RespondToAfdPassthrough is a RespondToAfd enum value
28456	RespondToAfdPassthrough = "PASSTHROUGH"
28457)
28458
28459// RespondToAfd_Values returns all elements of the RespondToAfd enum
28460func RespondToAfd_Values() []string {
28461	return []string{
28462		RespondToAfdNone,
28463		RespondToAfdRespond,
28464		RespondToAfdPassthrough,
28465	}
28466}
28467
28468// Choose an Amazon S3 canned ACL for MediaConvert to apply to this output.
28469const (
28470	// S3ObjectCannedAclPublicRead is a S3ObjectCannedAcl enum value
28471	S3ObjectCannedAclPublicRead = "PUBLIC_READ"
28472
28473	// S3ObjectCannedAclAuthenticatedRead is a S3ObjectCannedAcl enum value
28474	S3ObjectCannedAclAuthenticatedRead = "AUTHENTICATED_READ"
28475
28476	// S3ObjectCannedAclBucketOwnerRead is a S3ObjectCannedAcl enum value
28477	S3ObjectCannedAclBucketOwnerRead = "BUCKET_OWNER_READ"
28478
28479	// S3ObjectCannedAclBucketOwnerFullControl is a S3ObjectCannedAcl enum value
28480	S3ObjectCannedAclBucketOwnerFullControl = "BUCKET_OWNER_FULL_CONTROL"
28481)
28482
28483// S3ObjectCannedAcl_Values returns all elements of the S3ObjectCannedAcl enum
28484func S3ObjectCannedAcl_Values() []string {
28485	return []string{
28486		S3ObjectCannedAclPublicRead,
28487		S3ObjectCannedAclAuthenticatedRead,
28488		S3ObjectCannedAclBucketOwnerRead,
28489		S3ObjectCannedAclBucketOwnerFullControl,
28490	}
28491}
28492
28493// Specify how you want your data keys managed. AWS uses data keys to encrypt
28494// your content. AWS also encrypts the data keys themselves, using a customer
28495// master key (CMK), and then stores the encrypted data keys alongside your
28496// encrypted content. Use this setting to specify which AWS service manages
28497// the CMK. For the simplest setup, choose Amazon S3 (SERVER_SIDE_ENCRYPTION_S3).
28498// If you want your master key to be managed by AWS Key Management Service (KMS),
28499// choose AWS KMS (SERVER_SIDE_ENCRYPTION_KMS). By default, when you choose
28500// AWS KMS, KMS uses the AWS managed customer master key (CMK) associated with
28501// Amazon S3 to encrypt your data keys. You can optionally choose to specify
28502// a different, customer managed CMK. Do so by specifying the Amazon Resource
28503// Name (ARN) of the key for the setting KMS ARN (kmsKeyArn).
28504const (
28505	// S3ServerSideEncryptionTypeServerSideEncryptionS3 is a S3ServerSideEncryptionType enum value
28506	S3ServerSideEncryptionTypeServerSideEncryptionS3 = "SERVER_SIDE_ENCRYPTION_S3"
28507
28508	// S3ServerSideEncryptionTypeServerSideEncryptionKms is a S3ServerSideEncryptionType enum value
28509	S3ServerSideEncryptionTypeServerSideEncryptionKms = "SERVER_SIDE_ENCRYPTION_KMS"
28510)
28511
28512// S3ServerSideEncryptionType_Values returns all elements of the S3ServerSideEncryptionType enum
28513func S3ServerSideEncryptionType_Values() []string {
28514	return []string{
28515		S3ServerSideEncryptionTypeServerSideEncryptionS3,
28516		S3ServerSideEncryptionTypeServerSideEncryptionKms,
28517	}
28518}
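
// Example (illustrative sketch, not generated): combining a canned ACL with
// SSE-KMS on an S3 destination. S3DestinationSettings, S3DestinationAccessControl,
// and S3EncryptionSettings (with KmsKeyArn) are assumptions based on the
// destination shapes in this package, and the key ARN is a placeholder.
//
//    dest := &S3DestinationSettings{
//        AccessControl: &S3DestinationAccessControl{
//            CannedAcl: aws.String(S3ObjectCannedAclBucketOwnerFullControl),
//        },
//        Encryption: &S3EncryptionSettings{
//            EncryptionType: aws.String(S3ServerSideEncryptionTypeServerSideEncryptionKms),
//            KmsKeyArn:      aws.String("arn:aws:kms:us-west-2:111122223333:key/EXAMPLE"),
//        },
//    }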
28519
28520// Specify how the service handles outputs that have a different aspect ratio
28521// from the input aspect ratio. Choose Stretch to output (STRETCH_TO_OUTPUT)
28522// to have the service stretch your video image to fit. Keep the setting Default
28523// (DEFAULT) to have the service letterbox your video instead. This setting
28524// overrides any value that you specify for the setting Selection placement
28525// (position) in this output.
28526const (
28527	// ScalingBehaviorDefault is a ScalingBehavior enum value
28528	ScalingBehaviorDefault = "DEFAULT"
28529
28530	// ScalingBehaviorStretchToOutput is a ScalingBehavior enum value
28531	ScalingBehaviorStretchToOutput = "STRETCH_TO_OUTPUT"
28532)
28533
28534// ScalingBehavior_Values returns all elements of the ScalingBehavior enum
28535func ScalingBehavior_Values() []string {
28536	return []string{
28537		ScalingBehaviorDefault,
28538		ScalingBehaviorStretchToOutput,
28539	}
28540}
28541
28542// Set Framerate (SccDestinationFramerate) to make sure that the captions and
28543// the video are synchronized in the output. Specify a frame rate that matches
28544// the frame rate of the associated video. If the video frame rate is 29.97,
28545// choose 29.97 dropframe (FRAMERATE_29_97_DROPFRAME) only if the video has
28546// video_insertion=true and drop_frame_timecode=true; otherwise, choose 29.97
28547// non-dropframe (FRAMERATE_29_97_NON_DROPFRAME).
28548const (
28549	// SccDestinationFramerateFramerate2397 is a SccDestinationFramerate enum value
28550	SccDestinationFramerateFramerate2397 = "FRAMERATE_23_97"
28551
28552	// SccDestinationFramerateFramerate24 is a SccDestinationFramerate enum value
28553	SccDestinationFramerateFramerate24 = "FRAMERATE_24"
28554
28555	// SccDestinationFramerateFramerate25 is a SccDestinationFramerate enum value
28556	SccDestinationFramerateFramerate25 = "FRAMERATE_25"
28557
28558	// SccDestinationFramerateFramerate2997Dropframe is a SccDestinationFramerate enum value
28559	SccDestinationFramerateFramerate2997Dropframe = "FRAMERATE_29_97_DROPFRAME"
28560
28561	// SccDestinationFramerateFramerate2997NonDropframe is a SccDestinationFramerate enum value
28562	SccDestinationFramerateFramerate2997NonDropframe = "FRAMERATE_29_97_NON_DROPFRAME"
28563)
28564
28565// SccDestinationFramerate_Values returns all elements of the SccDestinationFramerate enum
28566func SccDestinationFramerate_Values() []string {
28567	return []string{
28568		SccDestinationFramerateFramerate2397,
28569		SccDestinationFramerateFramerate24,
28570		SccDestinationFramerateFramerate25,
28571		SccDestinationFramerateFramerate2997Dropframe,
28572		SccDestinationFramerateFramerate2997NonDropframe,
28573	}
28574}
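
// Example (illustrative sketch, not generated): matching the SCC caption frame
// rate to 29.97 non-dropframe video. SccDestinationSettings and its Framerate
// field are assumptions based on the caption destination shapes in this package.
//
//    scc := &SccDestinationSettings{
//        Framerate: aws.String(SccDestinationFramerateFramerate2997NonDropframe),
//    }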
28575
28576// Enable this setting when you run a test job to estimate how many reserved
28577// transcoding slots (RTS) you need. When this is enabled, MediaConvert runs
28578// your job from an on-demand queue with similar performance to what you will
28579// see with one RTS in a reserved queue. This setting is disabled by default.
28580const (
28581	// SimulateReservedQueueDisabled is a SimulateReservedQueue enum value
28582	SimulateReservedQueueDisabled = "DISABLED"
28583
28584	// SimulateReservedQueueEnabled is a SimulateReservedQueue enum value
28585	SimulateReservedQueueEnabled = "ENABLED"
28586)
28587
28588// SimulateReservedQueue_Values returns all elements of the SimulateReservedQueue enum
28589func SimulateReservedQueue_Values() []string {
28590	return []string{
28591		SimulateReservedQueueDisabled,
28592		SimulateReservedQueueEnabled,
28593	}
28594}
28595
28596// Specify how often MediaConvert sends STATUS_UPDATE events to Amazon CloudWatch
28597// Events. Set the interval, in seconds, between status updates. MediaConvert
28598// sends an update at this interval from the time the service begins processing
28599// your job to the time it completes the transcode or encounters an error.
28600const (
28601	// StatusUpdateIntervalSeconds10 is a StatusUpdateInterval enum value
28602	StatusUpdateIntervalSeconds10 = "SECONDS_10"
28603
28604	// StatusUpdateIntervalSeconds12 is a StatusUpdateInterval enum value
28605	StatusUpdateIntervalSeconds12 = "SECONDS_12"
28606
28607	// StatusUpdateIntervalSeconds15 is a StatusUpdateInterval enum value
28608	StatusUpdateIntervalSeconds15 = "SECONDS_15"
28609
28610	// StatusUpdateIntervalSeconds20 is a StatusUpdateInterval enum value
28611	StatusUpdateIntervalSeconds20 = "SECONDS_20"
28612
28613	// StatusUpdateIntervalSeconds30 is a StatusUpdateInterval enum value
28614	StatusUpdateIntervalSeconds30 = "SECONDS_30"
28615
28616	// StatusUpdateIntervalSeconds60 is a StatusUpdateInterval enum value
28617	StatusUpdateIntervalSeconds60 = "SECONDS_60"
28618
28619	// StatusUpdateIntervalSeconds120 is a StatusUpdateInterval enum value
28620	StatusUpdateIntervalSeconds120 = "SECONDS_120"
28621
28622	// StatusUpdateIntervalSeconds180 is a StatusUpdateInterval enum value
28623	StatusUpdateIntervalSeconds180 = "SECONDS_180"
28624
28625	// StatusUpdateIntervalSeconds240 is a StatusUpdateInterval enum value
28626	StatusUpdateIntervalSeconds240 = "SECONDS_240"
28627
28628	// StatusUpdateIntervalSeconds300 is a StatusUpdateInterval enum value
28629	StatusUpdateIntervalSeconds300 = "SECONDS_300"
28630
28631	// StatusUpdateIntervalSeconds360 is a StatusUpdateInterval enum value
28632	StatusUpdateIntervalSeconds360 = "SECONDS_360"
28633
28634	// StatusUpdateIntervalSeconds420 is a StatusUpdateInterval enum value
28635	StatusUpdateIntervalSeconds420 = "SECONDS_420"
28636
28637	// StatusUpdateIntervalSeconds480 is a StatusUpdateInterval enum value
28638	StatusUpdateIntervalSeconds480 = "SECONDS_480"
28639
28640	// StatusUpdateIntervalSeconds540 is a StatusUpdateInterval enum value
28641	StatusUpdateIntervalSeconds540 = "SECONDS_540"
28642
28643	// StatusUpdateIntervalSeconds600 is a StatusUpdateInterval enum value
28644	StatusUpdateIntervalSeconds600 = "SECONDS_600"
28645)
28646
28647// StatusUpdateInterval_Values returns all elements of the StatusUpdateInterval enum
28648func StatusUpdateInterval_Values() []string {
28649	return []string{
28650		StatusUpdateIntervalSeconds10,
28651		StatusUpdateIntervalSeconds12,
28652		StatusUpdateIntervalSeconds15,
28653		StatusUpdateIntervalSeconds20,
28654		StatusUpdateIntervalSeconds30,
28655		StatusUpdateIntervalSeconds60,
28656		StatusUpdateIntervalSeconds120,
28657		StatusUpdateIntervalSeconds180,
28658		StatusUpdateIntervalSeconds240,
28659		StatusUpdateIntervalSeconds300,
28660		StatusUpdateIntervalSeconds360,
28661		StatusUpdateIntervalSeconds420,
28662		StatusUpdateIntervalSeconds480,
28663		StatusUpdateIntervalSeconds540,
28664		StatusUpdateIntervalSeconds600,
28665	}
28666}
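
// Example (illustrative sketch, not generated): the job-level options described
// above, assuming svc is a *MediaConvert client and that CreateJobInput exposes
// StatusUpdateInterval and SimulateReservedQueue fields as defined elsewhere in
// this package. The role ARN and jobSettings value are placeholders.
//
//    _, err := svc.CreateJob(&CreateJobInput{
//        Role:                  aws.String("arn:aws:iam::111122223333:role/MediaConvertRole"), // placeholder
//        Settings:              jobSettings,                                                   // *JobSettings built separately
//        StatusUpdateInterval:  aws.String(StatusUpdateIntervalSeconds60),
//        SimulateReservedQueue: aws.String(SimulateReservedQueueEnabled), // test job to size an RTS purchase
//    })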
28667
28668// A page type as defined in the standard ETSI EN 300 468, Table 94
28669const (
28670	// TeletextPageTypePageTypeInitial is a TeletextPageType enum value
28671	TeletextPageTypePageTypeInitial = "PAGE_TYPE_INITIAL"
28672
28673	// TeletextPageTypePageTypeSubtitle is a TeletextPageType enum value
28674	TeletextPageTypePageTypeSubtitle = "PAGE_TYPE_SUBTITLE"
28675
28676	// TeletextPageTypePageTypeAddlInfo is a TeletextPageType enum value
28677	TeletextPageTypePageTypeAddlInfo = "PAGE_TYPE_ADDL_INFO"
28678
28679	// TeletextPageTypePageTypeProgramSchedule is a TeletextPageType enum value
28680	TeletextPageTypePageTypeProgramSchedule = "PAGE_TYPE_PROGRAM_SCHEDULE"
28681
28682	// TeletextPageTypePageTypeHearingImpairedSubtitle is a TeletextPageType enum value
28683	TeletextPageTypePageTypeHearingImpairedSubtitle = "PAGE_TYPE_HEARING_IMPAIRED_SUBTITLE"
28684)
28685
28686// TeletextPageType_Values returns all elements of the TeletextPageType enum
28687func TeletextPageType_Values() []string {
28688	return []string{
28689		TeletextPageTypePageTypeInitial,
28690		TeletextPageTypePageTypeSubtitle,
28691		TeletextPageTypePageTypeAddlInfo,
28692		TeletextPageTypePageTypeProgramSchedule,
28693		TeletextPageTypePageTypeHearingImpairedSubtitle,
28694	}
28695}
28696
28697// Use Position (Position) under Timecode burn-in (TimecodeBurnIn) to specify
28698// the location of the burned-in timecode on the output video.
28699const (
28700	// TimecodeBurninPositionTopCenter is a TimecodeBurninPosition enum value
28701	TimecodeBurninPositionTopCenter = "TOP_CENTER"
28702
28703	// TimecodeBurninPositionTopLeft is a TimecodeBurninPosition enum value
28704	TimecodeBurninPositionTopLeft = "TOP_LEFT"
28705
28706	// TimecodeBurninPositionTopRight is a TimecodeBurninPosition enum value
28707	TimecodeBurninPositionTopRight = "TOP_RIGHT"
28708
28709	// TimecodeBurninPositionMiddleLeft is a TimecodeBurninPosition enum value
28710	TimecodeBurninPositionMiddleLeft = "MIDDLE_LEFT"
28711
28712	// TimecodeBurninPositionMiddleCenter is a TimecodeBurninPosition enum value
28713	TimecodeBurninPositionMiddleCenter = "MIDDLE_CENTER"
28714
28715	// TimecodeBurninPositionMiddleRight is a TimecodeBurninPosition enum value
28716	TimecodeBurninPositionMiddleRight = "MIDDLE_RIGHT"
28717
28718	// TimecodeBurninPositionBottomLeft is a TimecodeBurninPosition enum value
28719	TimecodeBurninPositionBottomLeft = "BOTTOM_LEFT"
28720
28721	// TimecodeBurninPositionBottomCenter is a TimecodeBurninPosition enum value
28722	TimecodeBurninPositionBottomCenter = "BOTTOM_CENTER"
28723
28724	// TimecodeBurninPositionBottomRight is a TimecodeBurninPosition enum value
28725	TimecodeBurninPositionBottomRight = "BOTTOM_RIGHT"
28726)
28727
28728// TimecodeBurninPosition_Values returns all elements of the TimecodeBurninPosition enum
28729func TimecodeBurninPosition_Values() []string {
28730	return []string{
28731		TimecodeBurninPositionTopCenter,
28732		TimecodeBurninPositionTopLeft,
28733		TimecodeBurninPositionTopRight,
28734		TimecodeBurninPositionMiddleLeft,
28735		TimecodeBurninPositionMiddleCenter,
28736		TimecodeBurninPositionMiddleRight,
28737		TimecodeBurninPositionBottomLeft,
28738		TimecodeBurninPositionBottomCenter,
28739		TimecodeBurninPositionBottomRight,
28740	}
28741}
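
// Example (illustrative sketch, not generated): burning a timecode into the
// top-right corner of the frame. The TimecodeBurnin struct and its FontSize,
// Position, and Prefix fields are assumptions based on the video preprocessor
// shapes in this package.
//
//    burnin := &TimecodeBurnin{
//        FontSize: aws.Int64(32),
//        Position: aws.String(TimecodeBurninPositionTopRight),
//        Prefix:   aws.String("REEL1-"),
//    }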
28742
28743// Use Source (TimecodeSource) to set how timecodes are handled within this
28744// job. To make sure that your video, audio, captions, and markers are synchronized
28745// and that time-based features, such as image inserter, work correctly, choose
28746// the Timecode source option that matches your assets. All timecodes are in
28747// a 24-hour format with frame number (HH:MM:SS:FF). * Embedded (EMBEDDED) -
28748// Use the timecode that is in the input video. If no embedded timecode is in
28749// the source, the service will use Start at 0 (ZEROBASED) instead. * Start
28750// at 0 (ZEROBASED) - Set the timecode of the initial frame to 00:00:00:00.
28751// * Specified Start (SPECIFIEDSTART) - Set the timecode of the initial frame
28752// to a value other than zero. You use Start timecode (Start) to provide this
28753// value.
28754const (
28755	// TimecodeSourceEmbedded is a TimecodeSource enum value
28756	TimecodeSourceEmbedded = "EMBEDDED"
28757
28758	// TimecodeSourceZerobased is a TimecodeSource enum value
28759	TimecodeSourceZerobased = "ZEROBASED"
28760
28761	// TimecodeSourceSpecifiedstart is a TimecodeSource enum value
28762	TimecodeSourceSpecifiedstart = "SPECIFIEDSTART"
28763)
28764
28765// TimecodeSource_Values returns all elements of the TimecodeSource enum
28766func TimecodeSource_Values() []string {
28767	return []string{
28768		TimecodeSourceEmbedded,
28769		TimecodeSourceZerobased,
28770		TimecodeSourceSpecifiedstart,
28771	}
28772}
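
// Example (illustrative sketch, not generated): starting output timecodes at a
// specific value. The TimecodeConfig struct and its Source and Start fields are
// assumptions based on the job settings shapes in this package.
//
//    tc := &TimecodeConfig{
//        Source: aws.String(TimecodeSourceSpecifiedstart),
//        Start:  aws.String("01:00:00:00"), // HH:MM:SS:FF
//    }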
28773
28774// Applies only to HLS outputs. Use this setting to specify whether the service
28775// inserts the ID3 timed metadata from the input in this output.
28776const (
28777	// TimedMetadataPassthrough is a TimedMetadata enum value
28778	TimedMetadataPassthrough = "PASSTHROUGH"
28779
28780	// TimedMetadataNone is a TimedMetadata enum value
28781	TimedMetadataNone = "NONE"
28782)
28783
28784// TimedMetadata_Values returns all elements of the TimedMetadata enum
28785func TimedMetadata_Values() []string {
28786	return []string{
28787		TimedMetadataPassthrough,
28788		TimedMetadataNone,
28789	}
28790}
28791
28792// Pass through style and position information from a TTML-like input source
28793// (TTML, SMPTE-TT) to the TTML output.
28794const (
28795	// TtmlStylePassthroughEnabled is a TtmlStylePassthrough enum value
28796	TtmlStylePassthroughEnabled = "ENABLED"
28797
28798	// TtmlStylePassthroughDisabled is a TtmlStylePassthrough enum value
28799	TtmlStylePassthroughDisabled = "DISABLED"
28800)
28801
28802// TtmlStylePassthrough_Values returns all elements of the TtmlStylePassthrough enum
28803func TtmlStylePassthrough_Values() []string {
28804	return []string{
28805		TtmlStylePassthroughEnabled,
28806		TtmlStylePassthroughDisabled,
28807	}
28808}
28809
28810const (
28811	// TypeSystem is a Type enum value
28812	TypeSystem = "SYSTEM"
28813
28814	// TypeCustom is a Type enum value
28815	TypeCustom = "CUSTOM"
28816)
28817
28818// Type_Values returns all elements of the Type enum
28819func Type_Values() []string {
28820	return []string{
28821		TypeSystem,
28822		TypeCustom,
28823	}
28824}
28825
28826// Specify the VC3 class to choose the quality characteristics for this output.
28827// VC3 class, together with the settings Framerate (framerateNumerator and framerateDenominator)
28828// and Resolution (height and width), determines your output bitrate. For example,
28829// say that your video resolution is 1920x1080 and your framerate is 29.97.
28830// Then Class 145 (CLASS_145) gives you an output with a bitrate of approximately
28831// 145 Mbps and Class 220 (CLASS_220) gives you an output with a bitrate of
28832// approximately 220 Mbps. VC3 class also specifies the color bit depth of your
28833// output.
28834const (
28835	// Vc3ClassClass1458bit is a Vc3Class enum value
28836	Vc3ClassClass1458bit = "CLASS_145_8BIT"
28837
28838	// Vc3ClassClass2208bit is a Vc3Class enum value
28839	Vc3ClassClass2208bit = "CLASS_220_8BIT"
28840
28841	// Vc3ClassClass22010bit is a Vc3Class enum value
28842	Vc3ClassClass22010bit = "CLASS_220_10BIT"
28843)
28844
28845// Vc3Class_Values returns all elements of the Vc3Class enum
28846func Vc3Class_Values() []string {
28847	return []string{
28848		Vc3ClassClass1458bit,
28849		Vc3ClassClass2208bit,
28850		Vc3ClassClass22010bit,
28851	}
28852}
28853
28854// If you are using the console, use the Framerate setting to specify the frame
28855// rate for this output. If you want to keep the same frame rate as the input
28856// video, choose Follow source. If you want to do frame rate conversion, choose
28857// a frame rate from the dropdown list or choose Custom. The framerates shown
28858// in the dropdown list are decimal approximations of fractions. If you choose
28859// Custom, specify your frame rate as a fraction. If you are creating your transcoding
28860// job specification as a JSON file without the console, use FramerateControl
28861// to specify which value the service uses for the frame rate for this output.
28862// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
28863// from the input. Choose SPECIFIED if you want the service to use the frame
28864// rate you specify in the settings FramerateNumerator and FramerateDenominator.
28865const (
28866	// Vc3FramerateControlInitializeFromSource is a Vc3FramerateControl enum value
28867	Vc3FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
28868
28869	// Vc3FramerateControlSpecified is a Vc3FramerateControl enum value
28870	Vc3FramerateControlSpecified = "SPECIFIED"
28871)
28872
28873// Vc3FramerateControl_Values returns all elements of the Vc3FramerateControl enum
28874func Vc3FramerateControl_Values() []string {
28875	return []string{
28876		Vc3FramerateControlInitializeFromSource,
28877		Vc3FramerateControlSpecified,
28878	}
28879}
28880
28881// Choose the method that you want MediaConvert to use when increasing or decreasing
28882// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
28883// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
28884// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
28885// smooth picture, but might introduce undesirable video artifacts. For complex
28886// frame rate conversions, especially if your source video has already been
28887// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
28888// motion-compensated interpolation. FrameFormer chooses the best conversion
28889// method frame by frame. Note that using FrameFormer increases the transcoding
28890// time and incurs a significant add-on cost.
28891const (
28892	// Vc3FramerateConversionAlgorithmDuplicateDrop is a Vc3FramerateConversionAlgorithm enum value
28893	Vc3FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
28894
28895	// Vc3FramerateConversionAlgorithmInterpolate is a Vc3FramerateConversionAlgorithm enum value
28896	Vc3FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
28897
28898	// Vc3FramerateConversionAlgorithmFrameformer is a Vc3FramerateConversionAlgorithm enum value
28899	Vc3FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
28900)
28901
28902// Vc3FramerateConversionAlgorithm_Values returns all elements of the Vc3FramerateConversionAlgorithm enum
28903func Vc3FramerateConversionAlgorithm_Values() []string {
28904	return []string{
28905		Vc3FramerateConversionAlgorithmDuplicateDrop,
28906		Vc3FramerateConversionAlgorithmInterpolate,
28907		Vc3FramerateConversionAlgorithmFrameformer,
28908	}
28909}
28910
28911// Optional. Choose the scan line type for this output. If you don't specify
28912// a value, MediaConvert will create a progressive output.
28913const (
28914	// Vc3InterlaceModeInterlaced is a Vc3InterlaceMode enum value
28915	Vc3InterlaceModeInterlaced = "INTERLACED"
28916
28917	// Vc3InterlaceModeProgressive is a Vc3InterlaceMode enum value
28918	Vc3InterlaceModeProgressive = "PROGRESSIVE"
28919)
28920
28921// Vc3InterlaceMode_Values returns all elements of the Vc3InterlaceMode enum
28922func Vc3InterlaceMode_Values() []string {
28923	return []string{
28924		Vc3InterlaceModeInterlaced,
28925		Vc3InterlaceModeProgressive,
28926	}
28927}
28928
28929// Use this setting for interlaced outputs, when your output frame rate is half
28930// of your input frame rate. In this situation, choose Optimized interlacing
28931// (INTERLACED_OPTIMIZE) to create a better quality interlaced output. In this
28932// case, each progressive frame from the input corresponds to an interlaced
28933// field in the output. Keep the default value, Basic interlacing (INTERLACED),
28934// for all other output frame rates. With basic interlacing, MediaConvert performs
28935// any frame rate conversion first and then interlaces the frames. When you
28936// choose Optimized interlacing and you set your output frame rate to a value
28937// that isn't suitable for optimized interlacing, MediaConvert automatically
28938// falls back to basic interlacing. Required settings: To use optimized interlacing,
28939// you must set Telecine (telecine) to None (NONE) or Soft (SOFT). You can't
28940// use optimized interlacing for hard telecine outputs. You must also set Interlace
28941// mode (interlaceMode) to a value other than Progressive (PROGRESSIVE).
28942const (
28943	// Vc3ScanTypeConversionModeInterlaced is a Vc3ScanTypeConversionMode enum value
28944	Vc3ScanTypeConversionModeInterlaced = "INTERLACED"
28945
28946	// Vc3ScanTypeConversionModeInterlacedOptimize is a Vc3ScanTypeConversionMode enum value
28947	Vc3ScanTypeConversionModeInterlacedOptimize = "INTERLACED_OPTIMIZE"
28948)
28949
28950// Vc3ScanTypeConversionMode_Values returns all elements of the Vc3ScanTypeConversionMode enum
28951func Vc3ScanTypeConversionMode_Values() []string {
28952	return []string{
28953		Vc3ScanTypeConversionModeInterlaced,
28954		Vc3ScanTypeConversionModeInterlacedOptimize,
28955	}
28956}
28957
28958// Ignore this setting unless your input frame rate is 23.976 or 24 frames per
28959// second (fps). Enable slow PAL to create a 25 fps output by relabeling the
28960// video frames and resampling your audio. Note that enabling this setting will
28961// slightly reduce the duration of your video. Related settings: You must also
28962// set Framerate to 25. In your JSON job specification, set (framerateControl)
28963// to (SPECIFIED), (framerateNumerator) to 25 and (framerateDenominator) to
28964// 1.
28965const (
28966	// Vc3SlowPalDisabled is a Vc3SlowPal enum value
28967	Vc3SlowPalDisabled = "DISABLED"
28968
28969	// Vc3SlowPalEnabled is a Vc3SlowPal enum value
28970	Vc3SlowPalEnabled = "ENABLED"
28971)
28972
28973// Vc3SlowPal_Values returns all elements of the Vc3SlowPal enum
28974func Vc3SlowPal_Values() []string {
28975	return []string{
28976		Vc3SlowPalDisabled,
28977		Vc3SlowPalEnabled,
28978	}
28979}
28980
28981// When you do frame rate conversion from 23.976 frames per second (fps) to
28982// 29.97 fps, and your output scan type is interlaced, you can optionally enable
28983// hard telecine (HARD) to create a smoother picture. When you keep the default
28984// value, None (NONE), MediaConvert does a standard frame rate conversion to
28985// 29.97 fps and doesn't adjust the field polarity to create that smoother
28986// picture.
28987const (
28988	// Vc3TelecineNone is a Vc3Telecine enum value
28989	Vc3TelecineNone = "NONE"
28990
28991	// Vc3TelecineHard is a Vc3Telecine enum value
28992	Vc3TelecineHard = "HARD"
28993)
28994
28995// Vc3Telecine_Values returns all elements of the Vc3Telecine enum
28996func Vc3Telecine_Values() []string {
28997	return []string{
28998		Vc3TelecineNone,
28999		Vc3TelecineHard,
29000	}
29001}
29002
29003// Type of video codec
29004const (
29005	// VideoCodecAv1 is a VideoCodec enum value
29006	VideoCodecAv1 = "AV1"
29007
29008	// VideoCodecAvcIntra is a VideoCodec enum value
29009	VideoCodecAvcIntra = "AVC_INTRA"
29010
29011	// VideoCodecFrameCapture is a VideoCodec enum value
29012	VideoCodecFrameCapture = "FRAME_CAPTURE"
29013
29014	// VideoCodecH264 is a VideoCodec enum value
29015	VideoCodecH264 = "H_264"
29016
29017	// VideoCodecH265 is a VideoCodec enum value
29018	VideoCodecH265 = "H_265"
29019
29020	// VideoCodecMpeg2 is a VideoCodec enum value
29021	VideoCodecMpeg2 = "MPEG2"
29022
29023	// VideoCodecProres is a VideoCodec enum value
29024	VideoCodecProres = "PRORES"
29025
29026	// VideoCodecVc3 is a VideoCodec enum value
29027	VideoCodecVc3 = "VC3"
29028
29029	// VideoCodecVp8 is a VideoCodec enum value
29030	VideoCodecVp8 = "VP8"
29031
29032	// VideoCodecVp9 is a VideoCodec enum value
29033	VideoCodecVp9 = "VP9"
29034)
29035
29036// VideoCodec_Values returns all elements of the VideoCodec enum
29037func VideoCodec_Values() []string {
29038	return []string{
29039		VideoCodecAv1,
29040		VideoCodecAvcIntra,
29041		VideoCodecFrameCapture,
29042		VideoCodecH264,
29043		VideoCodecH265,
29044		VideoCodecMpeg2,
29045		VideoCodecProres,
29046		VideoCodecVc3,
29047		VideoCodecVp8,
29048		VideoCodecVp9,
29049	}
29050}
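
// Example (illustrative sketch, not generated): selecting a codec for an output.
// VideoCodecSettings and its per-codec settings structs (here ProresSettings)
// are assumptions based on the output shapes in this package; the Codec value
// determines which codec-specific settings struct MediaConvert reads.
//
//    codecSettings := &VideoCodecSettings{
//        Codec: aws.String(VideoCodecProres),
//        ProresSettings: &ProresSettings{
//            CodecProfile: aws.String(ProresCodecProfileAppleProres422Hq),
//        },
//    }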
29051
29052// Applies only to H.264, H.265, MPEG2, and ProRes outputs. Only enable Timecode
29053// insertion when the input frame rate is identical to the output frame rate.
29054// To include timecodes in this output, set Timecode insertion (VideoTimecodeInsertion)
29055// to PIC_TIMING_SEI. To leave them out, set it to DISABLED. Default is DISABLED.
29056// When the service inserts timecodes in an output, by default, it uses any
29057// embedded timecodes from the input. If none are present, the service will
29058// set the timecode for the first output frame to zero. To change this default
29059// behavior, adjust the settings under Timecode configuration (TimecodeConfig).
29060// In the console, these settings are located under Job > Job settings > Timecode
29061// configuration. Note - Timecode source under input settings (InputTimecodeSource)
29062// does not affect the timecodes that are inserted in the output. Source under
29063// Job settings > Timecode configuration (TimecodeSource) does.
29064const (
29065	// VideoTimecodeInsertionDisabled is a VideoTimecodeInsertion enum value
29066	VideoTimecodeInsertionDisabled = "DISABLED"
29067
29068	// VideoTimecodeInsertionPicTimingSei is a VideoTimecodeInsertion enum value
29069	VideoTimecodeInsertionPicTimingSei = "PIC_TIMING_SEI"
29070)
29071
29072// VideoTimecodeInsertion_Values returns all elements of the VideoTimecodeInsertion enum
29073func VideoTimecodeInsertion_Values() []string {
29074	return []string{
29075		VideoTimecodeInsertionDisabled,
29076		VideoTimecodeInsertionPicTimingSei,
29077	}
29078}
29079
29080// If you are using the console, use the Framerate setting to specify the frame
29081// rate for this output. If you want to keep the same frame rate as the input
29082// video, choose Follow source. If you want to do frame rate conversion, choose
29083// a frame rate from the dropdown list or choose Custom. The framerates shown
29084// in the dropdown list are decimal approximations of fractions. If you choose
29085// Custom, specify your frame rate as a fraction. If you are creating your transcoding
29086// job specification as a JSON file without the console, use FramerateControl
29087// to specify which value the service uses for the frame rate for this output.
29088// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
29089// from the input. Choose SPECIFIED if you want the service to use the frame
29090// rate you specify in the settings FramerateNumerator and FramerateDenominator.
29091const (
29092	// Vp8FramerateControlInitializeFromSource is a Vp8FramerateControl enum value
29093	Vp8FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
29094
29095	// Vp8FramerateControlSpecified is a Vp8FramerateControl enum value
29096	Vp8FramerateControlSpecified = "SPECIFIED"
29097)
29098
29099// Vp8FramerateControl_Values returns all elements of the Vp8FramerateControl enum
29100func Vp8FramerateControl_Values() []string {
29101	return []string{
29102		Vp8FramerateControlInitializeFromSource,
29103		Vp8FramerateControlSpecified,
29104	}
29105}
29106
29107// Choose the method that you want MediaConvert to use when increasing or decreasing
29108// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
29109// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
29110// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
29111// smooth picture, but might introduce undesirable video artifacts. For complex
29112// frame rate conversions, especially if your source video has already been
29113// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
29114// motion-compensated interpolation. FrameFormer chooses the best conversion
29115// method frame by frame. Note that using FrameFormer increases the transcoding
29116// time and incurs a significant add-on cost.
29117const (
29118	// Vp8FramerateConversionAlgorithmDuplicateDrop is a Vp8FramerateConversionAlgorithm enum value
29119	Vp8FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"
29120
29121	// Vp8FramerateConversionAlgorithmInterpolate is a Vp8FramerateConversionAlgorithm enum value
29122	Vp8FramerateConversionAlgorithmInterpolate = "INTERPOLATE"
29123
29124	// Vp8FramerateConversionAlgorithmFrameformer is a Vp8FramerateConversionAlgorithm enum value
29125	Vp8FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
29126)
29127
29128// Vp8FramerateConversionAlgorithm_Values returns all elements of the Vp8FramerateConversionAlgorithm enum
29129func Vp8FramerateConversionAlgorithm_Values() []string {
29130	return []string{
29131		Vp8FramerateConversionAlgorithmDuplicateDrop,
29132		Vp8FramerateConversionAlgorithmInterpolate,
29133		Vp8FramerateConversionAlgorithmFrameformer,
29134	}
29135}
29136
29137// Optional. Specify how the service determines the pixel aspect ratio (PAR)
29138// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
29139// uses the PAR from your input video for your output. To specify a different
29140// PAR in the console, choose any value other than Follow source. To specify
29141// a different PAR by editing the JSON job specification, choose SPECIFIED.
29142// When you choose SPECIFIED for this setting, you must also specify values
29143// for the parNumerator and parDenominator settings.
29144const (
29145	// Vp8ParControlInitializeFromSource is a Vp8ParControl enum value
29146	Vp8ParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
29147
29148	// Vp8ParControlSpecified is a Vp8ParControl enum value
29149	Vp8ParControlSpecified = "SPECIFIED"
29150)
29151
29152// Vp8ParControl_Values returns all elements of the Vp8ParControl enum
29153func Vp8ParControl_Values() []string {
29154	return []string{
29155		Vp8ParControlInitializeFromSource,
29156		Vp8ParControlSpecified,
29157	}
29158}
29159
29160// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
29161// want to trade off encoding speed for output video quality. The default behavior
29162// is faster, lower quality, multi-pass encoding.
29163const (
29164	// Vp8QualityTuningLevelMultiPass is a Vp8QualityTuningLevel enum value
29165	Vp8QualityTuningLevelMultiPass = "MULTI_PASS"
29166
29167	// Vp8QualityTuningLevelMultiPassHq is a Vp8QualityTuningLevel enum value
29168	Vp8QualityTuningLevelMultiPassHq = "MULTI_PASS_HQ"
29169)
29170
29171// Vp8QualityTuningLevel_Values returns all elements of the Vp8QualityTuningLevel enum
29172func Vp8QualityTuningLevel_Values() []string {
29173	return []string{
29174		Vp8QualityTuningLevelMultiPass,
29175		Vp8QualityTuningLevelMultiPassHq,
29176	}
29177}
29178
29179// With the VP8 codec, you can use only the variable bitrate (VBR) rate control
29180// mode.
29181const (
29182	// Vp8RateControlModeVbr is a Vp8RateControlMode enum value
29183	Vp8RateControlModeVbr = "VBR"
29184)
29185
29186// Vp8RateControlMode_Values returns all elements of the Vp8RateControlMode enum
29187func Vp8RateControlMode_Values() []string {
29188	return []string{
29189		Vp8RateControlModeVbr,
29190	}
29191}
29192
29193// If you are using the console, use the Framerate setting to specify the frame
29194// rate for this output. If you want to keep the same frame rate as the input
29195// video, choose Follow source. If you want to do frame rate conversion, choose
29196// a frame rate from the dropdown list or choose Custom. The framerates shown
29197// in the dropdown list are decimal approximations of fractions. If you choose
29198// Custom, specify your frame rate as a fraction. If you are creating your transcoding
29199// job specification as a JSON file without the console, use FramerateControl
29200// to specify which value the service uses for the frame rate for this output.
29201// Choose INITIALIZE_FROM_SOURCE if you want the service to use the frame rate
29202// from the input. Choose SPECIFIED if you want the service to use the frame
29203// rate you specify in the settings FramerateNumerator and FramerateDenominator.
29204const (
29205	// Vp9FramerateControlInitializeFromSource is a Vp9FramerateControl enum value
29206	Vp9FramerateControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"
29207
29208	// Vp9FramerateControlSpecified is a Vp9FramerateControl enum value
29209	Vp9FramerateControlSpecified = "SPECIFIED"
29210)
29211
29212// Vp9FramerateControl_Values returns all elements of the Vp9FramerateControl enum
29213func Vp9FramerateControl_Values() []string {
29214	return []string{
29215		Vp9FramerateControlInitializeFromSource,
29216		Vp9FramerateControlSpecified,
29217	}
29218}

// Choose the method that you want MediaConvert to use when increasing or decreasing
// the frame rate. We recommend using drop duplicate (DUPLICATE_DROP) for numerically
// simple conversions, such as 60 fps to 30 fps. For numerically complex conversions,
// you can use interpolate (INTERPOLATE) to avoid stutter. This results in a
// smooth picture, but might introduce undesirable video artifacts. For complex
// frame rate conversions, especially if your source video has already been
// converted from its original cadence, use FrameFormer (FRAMEFORMER) to do
// motion-compensated interpolation. FrameFormer chooses the best conversion
// method frame by frame. Note that using FrameFormer increases the transcoding
// time and incurs a significant add-on cost.
const (
	// Vp9FramerateConversionAlgorithmDuplicateDrop is a Vp9FramerateConversionAlgorithm enum value
	Vp9FramerateConversionAlgorithmDuplicateDrop = "DUPLICATE_DROP"

	// Vp9FramerateConversionAlgorithmInterpolate is a Vp9FramerateConversionAlgorithm enum value
	Vp9FramerateConversionAlgorithmInterpolate = "INTERPOLATE"

	// Vp9FramerateConversionAlgorithmFrameformer is a Vp9FramerateConversionAlgorithm enum value
	Vp9FramerateConversionAlgorithmFrameformer = "FRAMEFORMER"
)

// Vp9FramerateConversionAlgorithm_Values returns all elements of the Vp9FramerateConversionAlgorithm enum
func Vp9FramerateConversionAlgorithm_Values() []string {
	return []string{
		Vp9FramerateConversionAlgorithmDuplicateDrop,
		Vp9FramerateConversionAlgorithmInterpolate,
		Vp9FramerateConversionAlgorithmFrameformer,
	}
}
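
// The guidance above can be expressed as a small selection helper. This is an
// illustrative sketch of the rule of thumb only (chooseVp9ConversionAlgorithm is
// hypothetical, not part of the generated API); it treats a conversion as
// numerically simple when one integer frame rate evenly divides the other:
//
//    // chooseVp9ConversionAlgorithm assumes both rates are positive integers.
//    func chooseVp9ConversionAlgorithm(sourceFps, targetFps int64) string {
//        if sourceFps%targetFps == 0 || targetFps%sourceFps == 0 {
//            // Numerically simple conversion, e.g. 60 fps to 30 fps.
//            return Vp9FramerateConversionAlgorithmDuplicateDrop
//        }
//        // Numerically complex conversion; INTERPOLATE avoids stutter, while
//        // FRAMEFORMER (motion-compensated, higher cost) suits sources whose
//        // cadence has already been converted.
//        return Vp9FramerateConversionAlgorithmInterpolate
//    }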

// Optional. Specify how the service determines the pixel aspect ratio (PAR)
// for this output. The default behavior, Follow source (INITIALIZE_FROM_SOURCE),
// uses the PAR from your input video for your output. To specify a different
// PAR in the console, choose any value other than Follow source. To specify
// a different PAR by editing the JSON job specification, choose SPECIFIED.
// When you choose SPECIFIED for this setting, you must also specify values
// for the parNumerator and parDenominator settings.
const (
	// Vp9ParControlInitializeFromSource is a Vp9ParControl enum value
	Vp9ParControlInitializeFromSource = "INITIALIZE_FROM_SOURCE"

	// Vp9ParControlSpecified is a Vp9ParControl enum value
	Vp9ParControlSpecified = "SPECIFIED"
)

// Vp9ParControl_Values returns all elements of the Vp9ParControl enum
func Vp9ParControl_Values() []string {
	return []string{
		Vp9ParControlInitializeFromSource,
		Vp9ParControlSpecified,
	}
}
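
// For example, to override the source PAR with an anamorphic 40:33 pixel aspect
// ratio, choose SPECIFIED and set both parts of the fraction. A minimal sketch,
// assuming the Vp9Settings struct generated elsewhere in this package exposes
// ParControl, ParNumerator, and ParDenominator fields (40:33 is only an
// illustrative value):
//
//    vp9 := &Vp9Settings{
//        ParControl:     aws.String(Vp9ParControlSpecified),
//        ParNumerator:   aws.Int64(40),
//        ParDenominator: aws.Int64(33),
//    }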

// Optional. Use Quality tuning level (qualityTuningLevel) to choose how you
// want to trade off encoding speed for output video quality. The default behavior
// is faster, lower quality, multi-pass encoding.
const (
	// Vp9QualityTuningLevelMultiPass is a Vp9QualityTuningLevel enum value
	Vp9QualityTuningLevelMultiPass = "MULTI_PASS"

	// Vp9QualityTuningLevelMultiPassHq is a Vp9QualityTuningLevel enum value
	Vp9QualityTuningLevelMultiPassHq = "MULTI_PASS_HQ"
)

// Vp9QualityTuningLevel_Values returns all elements of the Vp9QualityTuningLevel enum
func Vp9QualityTuningLevel_Values() []string {
	return []string{
		Vp9QualityTuningLevelMultiPass,
		Vp9QualityTuningLevelMultiPassHq,
	}
}

// With the VP9 codec, you can use only the variable bitrate (VBR) rate control
// mode.
const (
	// Vp9RateControlModeVbr is a Vp9RateControlMode enum value
	Vp9RateControlModeVbr = "VBR"
)

// Vp9RateControlMode_Values returns all elements of the Vp9RateControlMode enum
func Vp9RateControlMode_Values() []string {
	return []string{
		Vp9RateControlModeVbr,
	}
}
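
// Because VBR is the only valid rate control mode for VP9, a typical configuration
// pins RateControlMode to VBR, supplies a target bitrate, and optionally selects
// the slower MULTI_PASS_HQ quality tuning level. A minimal sketch, assuming the
// Vp9Settings struct generated elsewhere in this package exposes RateControlMode,
// Bitrate, and QualityTuningLevel fields (the 5 Mb/s bitrate is only an
// illustrative value):
//
//    vp9 := &Vp9Settings{
//        RateControlMode:    aws.String(Vp9RateControlModeVbr),
//        Bitrate:            aws.Int64(5000000),
//        QualityTuningLevel: aws.String(Vp9QualityTuningLevelMultiPassHq),
//    }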

// Optional. Ignore this setting unless Nagra support directs you to specify
// a value. When you don't specify a value here, the Nagra NexGuard library
// uses its default value.
const (
	// WatermarkingStrengthLightest is a WatermarkingStrength enum value
	WatermarkingStrengthLightest = "LIGHTEST"

	// WatermarkingStrengthLighter is a WatermarkingStrength enum value
	WatermarkingStrengthLighter = "LIGHTER"

	// WatermarkingStrengthDefault is a WatermarkingStrength enum value
	WatermarkingStrengthDefault = "DEFAULT"

	// WatermarkingStrengthStronger is a WatermarkingStrength enum value
	WatermarkingStrengthStronger = "STRONGER"

	// WatermarkingStrengthStrongest is a WatermarkingStrength enum value
	WatermarkingStrengthStrongest = "STRONGEST"
)

// WatermarkingStrength_Values returns all elements of the WatermarkingStrength enum
func WatermarkingStrength_Values() []string {
	return []string{
		WatermarkingStrengthLightest,
		WatermarkingStrengthLighter,
		WatermarkingStrengthDefault,
		WatermarkingStrengthStronger,
		WatermarkingStrengthStrongest,
	}
}

// The service defaults to using RIFF for WAV outputs. If your output audio
// is likely to exceed 4 GB in file size, or if you otherwise need the extended
// support of the RF64 format, set your output WAV file format to RF64.
const (
	// WavFormatRiff is a WavFormat enum value
	WavFormatRiff = "RIFF"

	// WavFormatRf64 is a WavFormat enum value
	WavFormatRf64 = "RF64"
)

// WavFormat_Values returns all elements of the WavFormat enum
func WavFormat_Values() []string {
	return []string{
		WavFormatRiff,
		WavFormatRf64,
	}
}
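
// RIFF stores chunk sizes in 32-bit fields, which is where the 4 GB ceiling comes
// from, so the right format can be estimated from the uncompressed PCM size:
// channels x sample rate x bytes per sample x duration. A minimal sketch (the
// chooseWavFormat helper below is hypothetical, not part of the generated API):
//
//    // chooseWavFormat returns RF64 when the estimated uncompressed output
//    // would exceed the 4 GB RIFF limit, and RIFF otherwise.
//    func chooseWavFormat(channels, sampleRate, bitDepth, durationSeconds int64) string {
//        estimatedBytes := channels * sampleRate * (bitDepth / 8) * durationSeconds
//        if estimatedBytes > 4*1024*1024*1024 {
//            return WavFormatRf64
//        }
//        return WavFormatRiff
//    }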