1// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
2
3package rekognition
4
5import (
6	"github.com/aws/aws-sdk-go/aws"
7	"github.com/aws/aws-sdk-go/aws/awsutil"
8	"github.com/aws/aws-sdk-go/aws/request"
9)
10
11const opCompareFaces = "CompareFaces"
12
13// CompareFacesRequest generates a "aws/request.Request" representing the
14// client's request for the CompareFaces operation. The "output" return
15// value will be populated with the request's response once the request complets
16// successfuly.
17//
18// Use "Send" method on the returned Request to send the API call to the service.
19// the "output" return value is not valid until after Send returns without error.
20//
21// See CompareFaces for more information on using the CompareFaces
22// API call, and error handling.
23//
24// This method is useful when you want to inject custom logic or configuration
25// into the SDK's request lifecycle. Such as custom headers, or retry logic.
26//
27//
28//    // Example sending a request using the CompareFacesRequest method.
29//    req, resp := client.CompareFacesRequest(params)
30//
31//    err := req.Send()
32//    if err == nil { // resp is now filled
33//        fmt.Println(resp)
34//    }
35func (c *Rekognition) CompareFacesRequest(input *CompareFacesInput) (req *request.Request, output *CompareFacesOutput) {
36	op := &request.Operation{
37		Name:       opCompareFaces,
38		HTTPMethod: "POST",
39		HTTPPath:   "/",
40	}
41
42	if input == nil {
43		input = &CompareFacesInput{}
44	}
45
46	output = &CompareFacesOutput{}
47	req = c.newRequest(op, input, output)
48	return
49}
50
51// CompareFaces API operation for Amazon Rekognition.
52//
53// Compares a face in the source input image with each face detected in the
54// target input image.
55//
56// If the source image contains multiple faces, the service detects the largest
57// face and compares it with each face detected in the target image.
58//
59// In response, the operation returns an array of face matches ordered by similarity
60// score in descending order. For each face match, the response provides a bounding
// box of the face, facial landmarks, pose details (pitch, roll, and yaw), quality
62// (brightness and sharpness), and confidence value (indicating the level of
63// confidence that the bounding box contains a face). The response also provides
64// a similarity score, which indicates how closely the faces match.
65//
66// By default, only faces with a similarity score of greater than or equal to
67// 80% are returned in the response. You can change this value by specifying
68// the SimilarityThreshold parameter.
69//
70// CompareFaces also returns an array of faces that don't match the source image.
71// For each face, it returns a bounding box, confidence value, landmarks, pose
72// details, and quality. The response also returns information about the face
73// in the source image, including the bounding box of the face and confidence
74// value.
75//
76// If the image doesn't contain Exif metadata, CompareFaces returns orientation
77// information for the source and target images. Use these values to display
78// the images with the correct image orientation.
79//
80// This is a stateless API operation. That is, data returned by this operation
81// doesn't persist.
82//
83// For an example, see get-started-exercise-compare-faces.
84//
85// This operation requires permissions to perform the rekognition:CompareFaces
86// action.
87//
88// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
89// with awserr.Error's Code and Message methods to get detailed information about
90// the error.
91//
92// See the AWS API reference guide for Amazon Rekognition's
93// API operation CompareFaces for usage and error information.
94//
95// Returned Error Codes:
96//   * ErrCodeInvalidParameterException "InvalidParameterException"
97//   Input parameter violated a constraint. Validate your parameter before calling
98//   the API operation again.
99//
100//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
101//   Amazon Rekognition is unable to access the S3 object specified in the request.
102//
103//   * ErrCodeImageTooLargeException "ImageTooLargeException"
104//   The input image size exceeds the allowed limit. For more information, see
105//   limits.
106//
107//   * ErrCodeAccessDeniedException "AccessDeniedException"
108//   You are not authorized to perform the action.
109//
110//   * ErrCodeInternalServerError "InternalServerError"
111//   Amazon Rekognition experienced a service issue. Try your call again.
112//
113//   * ErrCodeThrottlingException "ThrottlingException"
114//   Amazon Rekognition is temporarily unable to process the request. Try your
115//   call again.
116//
117//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
118//   The number of requests exceeded your throughput limit. If you want to increase
119//   this limit, contact Amazon Rekognition.
120//
121//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
122//   The provided image format is not supported.
123//
124func (c *Rekognition) CompareFaces(input *CompareFacesInput) (*CompareFacesOutput, error) {
125	req, out := c.CompareFacesRequest(input)
126	return out, req.Send()
127}
128
129// CompareFacesWithContext is the same as CompareFaces with the addition of
130// the ability to pass a context and additional request options.
131//
132// See CompareFaces for details on how to use this API operation.
133//
134// The context must be non-nil and will be used for request cancellation. If
135// the context is nil a panic will occur. In the future the SDK may create
136// sub-contexts for http.Requests. See https://golang.org/pkg/context/
137// for more information on using Contexts.
138func (c *Rekognition) CompareFacesWithContext(ctx aws.Context, input *CompareFacesInput, opts ...request.Option) (*CompareFacesOutput, error) {
139	req, out := c.CompareFacesRequest(input)
140	req.SetContext(ctx)
141	req.ApplyOptions(opts...)
142	return out, req.Send()
143}
144
145const opCreateCollection = "CreateCollection"
146
147// CreateCollectionRequest generates a "aws/request.Request" representing the
148// client's request for the CreateCollection operation. The "output" return
149// value will be populated with the request's response once the request complets
150// successfuly.
151//
152// Use "Send" method on the returned Request to send the API call to the service.
153// the "output" return value is not valid until after Send returns without error.
154//
155// See CreateCollection for more information on using the CreateCollection
156// API call, and error handling.
157//
158// This method is useful when you want to inject custom logic or configuration
159// into the SDK's request lifecycle. Such as custom headers, or retry logic.
160//
161//
162//    // Example sending a request using the CreateCollectionRequest method.
163//    req, resp := client.CreateCollectionRequest(params)
164//
165//    err := req.Send()
166//    if err == nil { // resp is now filled
167//        fmt.Println(resp)
168//    }
169func (c *Rekognition) CreateCollectionRequest(input *CreateCollectionInput) (req *request.Request, output *CreateCollectionOutput) {
170	op := &request.Operation{
171		Name:       opCreateCollection,
172		HTTPMethod: "POST",
173		HTTPPath:   "/",
174	}
175
176	if input == nil {
177		input = &CreateCollectionInput{}
178	}
179
180	output = &CreateCollectionOutput{}
181	req = c.newRequest(op, input, output)
182	return
183}
184
185// CreateCollection API operation for Amazon Rekognition.
186//
187// Creates a collection in an AWS Region. You can add faces to the collection
188// using the operation.
189//
190// For example, you might create collections, one for each of your application
191// users. A user can then index faces using the IndexFaces operation and persist
192// results in a specific collection. Then, a user can search the collection
193// for faces in the user-specific container.
194//
195// Collection names are case-sensitive.
196//
197// For an example, see example1.
198//
199// This operation requires permissions to perform the rekognition:CreateCollection
200// action.
201//
202// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
203// with awserr.Error's Code and Message methods to get detailed information about
204// the error.
205//
206// See the AWS API reference guide for Amazon Rekognition's
207// API operation CreateCollection for usage and error information.
208//
209// Returned Error Codes:
210//   * ErrCodeInvalidParameterException "InvalidParameterException"
211//   Input parameter violated a constraint. Validate your parameter before calling
212//   the API operation again.
213//
214//   * ErrCodeAccessDeniedException "AccessDeniedException"
215//   You are not authorized to perform the action.
216//
217//   * ErrCodeInternalServerError "InternalServerError"
218//   Amazon Rekognition experienced a service issue. Try your call again.
219//
220//   * ErrCodeThrottlingException "ThrottlingException"
221//   Amazon Rekognition is temporarily unable to process the request. Try your
222//   call again.
223//
224//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
225//   The number of requests exceeded your throughput limit. If you want to increase
226//   this limit, contact Amazon Rekognition.
227//
228//   * ErrCodeResourceAlreadyExistsException "ResourceAlreadyExistsException"
229//   A collection with the specified ID already exists.
230//
231func (c *Rekognition) CreateCollection(input *CreateCollectionInput) (*CreateCollectionOutput, error) {
232	req, out := c.CreateCollectionRequest(input)
233	return out, req.Send()
234}
235
236// CreateCollectionWithContext is the same as CreateCollection with the addition of
237// the ability to pass a context and additional request options.
238//
239// See CreateCollection for details on how to use this API operation.
240//
241// The context must be non-nil and will be used for request cancellation. If
242// the context is nil a panic will occur. In the future the SDK may create
243// sub-contexts for http.Requests. See https://golang.org/pkg/context/
244// for more information on using Contexts.
245func (c *Rekognition) CreateCollectionWithContext(ctx aws.Context, input *CreateCollectionInput, opts ...request.Option) (*CreateCollectionOutput, error) {
246	req, out := c.CreateCollectionRequest(input)
247	req.SetContext(ctx)
248	req.ApplyOptions(opts...)
249	return out, req.Send()
250}
251
252const opDeleteCollection = "DeleteCollection"
253
254// DeleteCollectionRequest generates a "aws/request.Request" representing the
255// client's request for the DeleteCollection operation. The "output" return
256// value will be populated with the request's response once the request complets
257// successfuly.
258//
259// Use "Send" method on the returned Request to send the API call to the service.
260// the "output" return value is not valid until after Send returns without error.
261//
262// See DeleteCollection for more information on using the DeleteCollection
263// API call, and error handling.
264//
265// This method is useful when you want to inject custom logic or configuration
266// into the SDK's request lifecycle. Such as custom headers, or retry logic.
267//
268//
269//    // Example sending a request using the DeleteCollectionRequest method.
270//    req, resp := client.DeleteCollectionRequest(params)
271//
272//    err := req.Send()
273//    if err == nil { // resp is now filled
274//        fmt.Println(resp)
275//    }
276func (c *Rekognition) DeleteCollectionRequest(input *DeleteCollectionInput) (req *request.Request, output *DeleteCollectionOutput) {
277	op := &request.Operation{
278		Name:       opDeleteCollection,
279		HTTPMethod: "POST",
280		HTTPPath:   "/",
281	}
282
283	if input == nil {
284		input = &DeleteCollectionInput{}
285	}
286
287	output = &DeleteCollectionOutput{}
288	req = c.newRequest(op, input, output)
289	return
290}
291
292// DeleteCollection API operation for Amazon Rekognition.
293//
294// Deletes the specified collection. Note that this operation removes all faces
295// in the collection. For an example, see example1.
296//
297// This operation requires permissions to perform the rekognition:DeleteCollection
298// action.
299//
300// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
301// with awserr.Error's Code and Message methods to get detailed information about
302// the error.
303//
304// See the AWS API reference guide for Amazon Rekognition's
305// API operation DeleteCollection for usage and error information.
306//
307// Returned Error Codes:
308//   * ErrCodeInvalidParameterException "InvalidParameterException"
309//   Input parameter violated a constraint. Validate your parameter before calling
310//   the API operation again.
311//
312//   * ErrCodeAccessDeniedException "AccessDeniedException"
313//   You are not authorized to perform the action.
314//
315//   * ErrCodeInternalServerError "InternalServerError"
316//   Amazon Rekognition experienced a service issue. Try your call again.
317//
318//   * ErrCodeThrottlingException "ThrottlingException"
319//   Amazon Rekognition is temporarily unable to process the request. Try your
320//   call again.
321//
322//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
323//   The number of requests exceeded your throughput limit. If you want to increase
324//   this limit, contact Amazon Rekognition.
325//
326//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
327//   Collection specified in the request is not found.
328//
329func (c *Rekognition) DeleteCollection(input *DeleteCollectionInput) (*DeleteCollectionOutput, error) {
330	req, out := c.DeleteCollectionRequest(input)
331	return out, req.Send()
332}
333
334// DeleteCollectionWithContext is the same as DeleteCollection with the addition of
335// the ability to pass a context and additional request options.
336//
337// See DeleteCollection for details on how to use this API operation.
338//
339// The context must be non-nil and will be used for request cancellation. If
340// the context is nil a panic will occur. In the future the SDK may create
341// sub-contexts for http.Requests. See https://golang.org/pkg/context/
342// for more information on using Contexts.
343func (c *Rekognition) DeleteCollectionWithContext(ctx aws.Context, input *DeleteCollectionInput, opts ...request.Option) (*DeleteCollectionOutput, error) {
344	req, out := c.DeleteCollectionRequest(input)
345	req.SetContext(ctx)
346	req.ApplyOptions(opts...)
347	return out, req.Send()
348}
349
350const opDeleteFaces = "DeleteFaces"
351
352// DeleteFacesRequest generates a "aws/request.Request" representing the
353// client's request for the DeleteFaces operation. The "output" return
354// value will be populated with the request's response once the request complets
355// successfuly.
356//
357// Use "Send" method on the returned Request to send the API call to the service.
358// the "output" return value is not valid until after Send returns without error.
359//
360// See DeleteFaces for more information on using the DeleteFaces
361// API call, and error handling.
362//
363// This method is useful when you want to inject custom logic or configuration
364// into the SDK's request lifecycle. Such as custom headers, or retry logic.
365//
366//
367//    // Example sending a request using the DeleteFacesRequest method.
368//    req, resp := client.DeleteFacesRequest(params)
369//
370//    err := req.Send()
371//    if err == nil { // resp is now filled
372//        fmt.Println(resp)
373//    }
374func (c *Rekognition) DeleteFacesRequest(input *DeleteFacesInput) (req *request.Request, output *DeleteFacesOutput) {
375	op := &request.Operation{
376		Name:       opDeleteFaces,
377		HTTPMethod: "POST",
378		HTTPPath:   "/",
379	}
380
381	if input == nil {
382		input = &DeleteFacesInput{}
383	}
384
385	output = &DeleteFacesOutput{}
386	req = c.newRequest(op, input, output)
387	return
388}
389
390// DeleteFaces API operation for Amazon Rekognition.
391//
392// Deletes faces from a collection. You specify a collection ID and an array
393// of face IDs to remove from the collection.
394//
395// This operation requires permissions to perform the rekognition:DeleteFaces
396// action.
397//
398// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
399// with awserr.Error's Code and Message methods to get detailed information about
400// the error.
401//
402// See the AWS API reference guide for Amazon Rekognition's
403// API operation DeleteFaces for usage and error information.
404//
405// Returned Error Codes:
406//   * ErrCodeInvalidParameterException "InvalidParameterException"
407//   Input parameter violated a constraint. Validate your parameter before calling
408//   the API operation again.
409//
410//   * ErrCodeAccessDeniedException "AccessDeniedException"
411//   You are not authorized to perform the action.
412//
413//   * ErrCodeInternalServerError "InternalServerError"
414//   Amazon Rekognition experienced a service issue. Try your call again.
415//
416//   * ErrCodeThrottlingException "ThrottlingException"
417//   Amazon Rekognition is temporarily unable to process the request. Try your
418//   call again.
419//
420//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
421//   The number of requests exceeded your throughput limit. If you want to increase
422//   this limit, contact Amazon Rekognition.
423//
424//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
425//   Collection specified in the request is not found.
426//
427func (c *Rekognition) DeleteFaces(input *DeleteFacesInput) (*DeleteFacesOutput, error) {
428	req, out := c.DeleteFacesRequest(input)
429	return out, req.Send()
430}
431
432// DeleteFacesWithContext is the same as DeleteFaces with the addition of
433// the ability to pass a context and additional request options.
434//
435// See DeleteFaces for details on how to use this API operation.
436//
437// The context must be non-nil and will be used for request cancellation. If
438// the context is nil a panic will occur. In the future the SDK may create
439// sub-contexts for http.Requests. See https://golang.org/pkg/context/
440// for more information on using Contexts.
441func (c *Rekognition) DeleteFacesWithContext(ctx aws.Context, input *DeleteFacesInput, opts ...request.Option) (*DeleteFacesOutput, error) {
442	req, out := c.DeleteFacesRequest(input)
443	req.SetContext(ctx)
444	req.ApplyOptions(opts...)
445	return out, req.Send()
446}
447
448const opDetectFaces = "DetectFaces"
449
450// DetectFacesRequest generates a "aws/request.Request" representing the
451// client's request for the DetectFaces operation. The "output" return
452// value will be populated with the request's response once the request complets
453// successfuly.
454//
455// Use "Send" method on the returned Request to send the API call to the service.
456// the "output" return value is not valid until after Send returns without error.
457//
458// See DetectFaces for more information on using the DetectFaces
459// API call, and error handling.
460//
461// This method is useful when you want to inject custom logic or configuration
462// into the SDK's request lifecycle. Such as custom headers, or retry logic.
463//
464//
465//    // Example sending a request using the DetectFacesRequest method.
466//    req, resp := client.DetectFacesRequest(params)
467//
468//    err := req.Send()
469//    if err == nil { // resp is now filled
470//        fmt.Println(resp)
471//    }
472func (c *Rekognition) DetectFacesRequest(input *DetectFacesInput) (req *request.Request, output *DetectFacesOutput) {
473	op := &request.Operation{
474		Name:       opDetectFaces,
475		HTTPMethod: "POST",
476		HTTPPath:   "/",
477	}
478
479	if input == nil {
480		input = &DetectFacesInput{}
481	}
482
483	output = &DetectFacesOutput{}
484	req = c.newRequest(op, input, output)
485	return
486}
487
488// DetectFaces API operation for Amazon Rekognition.
489//
490// Detects faces within an image (JPEG or PNG) that is provided as input.
491//
492// For each face detected, the operation returns face details including a bounding
493// box of the face, a confidence value (that the bounding box contains a face),
494// and a fixed set of attributes such as facial landmarks (for example, coordinates
495// of eye and mouth), gender, presence of beard, sunglasses, etc.
496//
497// The face-detection algorithm is most effective on frontal faces. For non-frontal
498// or obscured faces, the algorithm may not detect the faces or might detect
499// faces with lower confidence.
500//
501// This is a stateless API operation. That is, the operation does not persist
502// any data.
503//
504// For an example, see get-started-exercise-detect-faces.
505//
506// This operation requires permissions to perform the rekognition:DetectFaces
507// action.
508//
509// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
510// with awserr.Error's Code and Message methods to get detailed information about
511// the error.
512//
513// See the AWS API reference guide for Amazon Rekognition's
514// API operation DetectFaces for usage and error information.
515//
516// Returned Error Codes:
517//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
518//   Amazon Rekognition is unable to access the S3 object specified in the request.
519//
520//   * ErrCodeInvalidParameterException "InvalidParameterException"
521//   Input parameter violated a constraint. Validate your parameter before calling
522//   the API operation again.
523//
524//   * ErrCodeImageTooLargeException "ImageTooLargeException"
525//   The input image size exceeds the allowed limit. For more information, see
526//   limits.
527//
528//   * ErrCodeAccessDeniedException "AccessDeniedException"
529//   You are not authorized to perform the action.
530//
531//   * ErrCodeInternalServerError "InternalServerError"
532//   Amazon Rekognition experienced a service issue. Try your call again.
533//
534//   * ErrCodeThrottlingException "ThrottlingException"
535//   Amazon Rekognition is temporarily unable to process the request. Try your
536//   call again.
537//
538//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
539//   The number of requests exceeded your throughput limit. If you want to increase
540//   this limit, contact Amazon Rekognition.
541//
542//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
543//   The provided image format is not supported.
544//
545func (c *Rekognition) DetectFaces(input *DetectFacesInput) (*DetectFacesOutput, error) {
546	req, out := c.DetectFacesRequest(input)
547	return out, req.Send()
548}
549
550// DetectFacesWithContext is the same as DetectFaces with the addition of
551// the ability to pass a context and additional request options.
552//
553// See DetectFaces for details on how to use this API operation.
554//
555// The context must be non-nil and will be used for request cancellation. If
556// the context is nil a panic will occur. In the future the SDK may create
557// sub-contexts for http.Requests. See https://golang.org/pkg/context/
558// for more information on using Contexts.
559func (c *Rekognition) DetectFacesWithContext(ctx aws.Context, input *DetectFacesInput, opts ...request.Option) (*DetectFacesOutput, error) {
560	req, out := c.DetectFacesRequest(input)
561	req.SetContext(ctx)
562	req.ApplyOptions(opts...)
563	return out, req.Send()
564}
565
566const opDetectLabels = "DetectLabels"
567
568// DetectLabelsRequest generates a "aws/request.Request" representing the
569// client's request for the DetectLabels operation. The "output" return
570// value will be populated with the request's response once the request complets
571// successfuly.
572//
573// Use "Send" method on the returned Request to send the API call to the service.
574// the "output" return value is not valid until after Send returns without error.
575//
576// See DetectLabels for more information on using the DetectLabels
577// API call, and error handling.
578//
579// This method is useful when you want to inject custom logic or configuration
580// into the SDK's request lifecycle. Such as custom headers, or retry logic.
581//
582//
583//    // Example sending a request using the DetectLabelsRequest method.
584//    req, resp := client.DetectLabelsRequest(params)
585//
586//    err := req.Send()
587//    if err == nil { // resp is now filled
588//        fmt.Println(resp)
589//    }
590func (c *Rekognition) DetectLabelsRequest(input *DetectLabelsInput) (req *request.Request, output *DetectLabelsOutput) {
591	op := &request.Operation{
592		Name:       opDetectLabels,
593		HTTPMethod: "POST",
594		HTTPPath:   "/",
595	}
596
597	if input == nil {
598		input = &DetectLabelsInput{}
599	}
600
601	output = &DetectLabelsOutput{}
602	req = c.newRequest(op, input, output)
603	return
604}
605
606// DetectLabels API operation for Amazon Rekognition.
607//
608// Detects instances of real-world labels within an image (JPEG or PNG) provided
609// as input. This includes objects like flower, tree, and table; events like
610// wedding, graduation, and birthday party; and concepts like landscape, evening,
611// and nature. For an example, see get-started-exercise-detect-labels.
612//
613// For each object, scene, and concept the API returns one or more labels. Each
614// label provides the object name, and the level of confidence that the image
615// contains the object. For example, suppose the input image has a lighthouse,
616// the sea, and a rock. The response will include all three labels, one for
617// each object.
618//
619// {Name: lighthouse, Confidence: 98.4629}
620//
621// {Name: rock,Confidence: 79.2097}
622//
623// {Name: sea,Confidence: 75.061}
624//
625// In the preceding example, the operation returns one label for each of the
626// three objects. The operation can also return multiple labels for the same
627// object in the image. For example, if the input image shows a flower (for
628// example, a tulip), the operation might return the following three labels.
629//
630// {Name: flower,Confidence: 99.0562}
631//
632// {Name: plant,Confidence: 99.0562}
633//
634// {Name: tulip,Confidence: 99.0562}
635//
636// In this example, the detection algorithm more precisely identifies the flower
637// as a tulip.
638//
639// You can provide the input image as an S3 object or as base64-encoded bytes.
640// In response, the API returns an array of labels. In addition, the response
641// also includes the orientation correction. Optionally, you can specify MinConfidence
642// to control the confidence threshold for the labels returned. The default
643// is 50%. You can also add the MaxLabels parameter to limit the number of labels
644// returned.
645//
646// If the object detected is a person, the operation doesn't provide the same
647// facial details that the DetectFaces operation provides.
648//
649// This is a stateless API operation. That is, the operation does not persist
650// any data.
651//
652// This operation requires permissions to perform the rekognition:DetectLabels
653// action.
654//
655// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
656// with awserr.Error's Code and Message methods to get detailed information about
657// the error.
658//
659// See the AWS API reference guide for Amazon Rekognition's
660// API operation DetectLabels for usage and error information.
661//
662// Returned Error Codes:
663//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
664//   Amazon Rekognition is unable to access the S3 object specified in the request.
665//
666//   * ErrCodeInvalidParameterException "InvalidParameterException"
667//   Input parameter violated a constraint. Validate your parameter before calling
668//   the API operation again.
669//
670//   * ErrCodeImageTooLargeException "ImageTooLargeException"
671//   The input image size exceeds the allowed limit. For more information, see
672//   limits.
673//
674//   * ErrCodeAccessDeniedException "AccessDeniedException"
675//   You are not authorized to perform the action.
676//
677//   * ErrCodeInternalServerError "InternalServerError"
678//   Amazon Rekognition experienced a service issue. Try your call again.
679//
680//   * ErrCodeThrottlingException "ThrottlingException"
681//   Amazon Rekognition is temporarily unable to process the request. Try your
682//   call again.
683//
684//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
685//   The number of requests exceeded your throughput limit. If you want to increase
686//   this limit, contact Amazon Rekognition.
687//
688//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
689//   The provided image format is not supported.
690//
691func (c *Rekognition) DetectLabels(input *DetectLabelsInput) (*DetectLabelsOutput, error) {
692	req, out := c.DetectLabelsRequest(input)
693	return out, req.Send()
694}
695
696// DetectLabelsWithContext is the same as DetectLabels with the addition of
697// the ability to pass a context and additional request options.
698//
699// See DetectLabels for details on how to use this API operation.
700//
701// The context must be non-nil and will be used for request cancellation. If
702// the context is nil a panic will occur. In the future the SDK may create
703// sub-contexts for http.Requests. See https://golang.org/pkg/context/
704// for more information on using Contexts.
705func (c *Rekognition) DetectLabelsWithContext(ctx aws.Context, input *DetectLabelsInput, opts ...request.Option) (*DetectLabelsOutput, error) {
706	req, out := c.DetectLabelsRequest(input)
707	req.SetContext(ctx)
708	req.ApplyOptions(opts...)
709	return out, req.Send()
710}
711
712const opDetectModerationLabels = "DetectModerationLabels"
713
714// DetectModerationLabelsRequest generates a "aws/request.Request" representing the
715// client's request for the DetectModerationLabels operation. The "output" return
716// value will be populated with the request's response once the request complets
717// successfuly.
718//
719// Use "Send" method on the returned Request to send the API call to the service.
720// the "output" return value is not valid until after Send returns without error.
721//
722// See DetectModerationLabels for more information on using the DetectModerationLabels
723// API call, and error handling.
724//
725// This method is useful when you want to inject custom logic or configuration
726// into the SDK's request lifecycle. Such as custom headers, or retry logic.
727//
728//
729//    // Example sending a request using the DetectModerationLabelsRequest method.
730//    req, resp := client.DetectModerationLabelsRequest(params)
731//
732//    err := req.Send()
733//    if err == nil { // resp is now filled
734//        fmt.Println(resp)
735//    }
736func (c *Rekognition) DetectModerationLabelsRequest(input *DetectModerationLabelsInput) (req *request.Request, output *DetectModerationLabelsOutput) {
737	op := &request.Operation{
738		Name:       opDetectModerationLabels,
739		HTTPMethod: "POST",
740		HTTPPath:   "/",
741	}
742
743	if input == nil {
744		input = &DetectModerationLabelsInput{}
745	}
746
747	output = &DetectModerationLabelsOutput{}
748	req = c.newRequest(op, input, output)
749	return
750}
751
752// DetectModerationLabels API operation for Amazon Rekognition.
753//
754// Detects explicit or suggestive adult content in a specified JPEG or PNG format
755// image. Use DetectModerationLabels to moderate images depending on your requirements.
756// For example, you might want to filter images that contain nudity, but not
757// images containing suggestive content.
758//
759// To filter images, use the labels returned by DetectModerationLabels to determine
760// which types of content are appropriate. For information about moderation
761// labels, see image-moderation.
762//
763// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
764// with awserr.Error's Code and Message methods to get detailed information about
765// the error.
766//
767// See the AWS API reference guide for Amazon Rekognition's
768// API operation DetectModerationLabels for usage and error information.
769//
770// Returned Error Codes:
771//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
772//   Amazon Rekognition is unable to access the S3 object specified in the request.
773//
774//   * ErrCodeInvalidParameterException "InvalidParameterException"
775//   Input parameter violated a constraint. Validate your parameter before calling
776//   the API operation again.
777//
778//   * ErrCodeImageTooLargeException "ImageTooLargeException"
779//   The input image size exceeds the allowed limit. For more information, see
780//   limits.
781//
782//   * ErrCodeAccessDeniedException "AccessDeniedException"
783//   You are not authorized to perform the action.
784//
785//   * ErrCodeInternalServerError "InternalServerError"
786//   Amazon Rekognition experienced a service issue. Try your call again.
787//
788//   * ErrCodeThrottlingException "ThrottlingException"
789//   Amazon Rekognition is temporarily unable to process the request. Try your
790//   call again.
791//
792//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
793//   The number of requests exceeded your throughput limit. If you want to increase
794//   this limit, contact Amazon Rekognition.
795//
796//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
797//   The provided image format is not supported.
798//
799func (c *Rekognition) DetectModerationLabels(input *DetectModerationLabelsInput) (*DetectModerationLabelsOutput, error) {
800	req, out := c.DetectModerationLabelsRequest(input)
801	return out, req.Send()
802}
803
804// DetectModerationLabelsWithContext is the same as DetectModerationLabels with the addition of
805// the ability to pass a context and additional request options.
806//
807// See DetectModerationLabels for details on how to use this API operation.
808//
809// The context must be non-nil and will be used for request cancellation. If
810// the context is nil a panic will occur. In the future the SDK may create
811// sub-contexts for http.Requests. See https://golang.org/pkg/context/
812// for more information on using Contexts.
813func (c *Rekognition) DetectModerationLabelsWithContext(ctx aws.Context, input *DetectModerationLabelsInput, opts ...request.Option) (*DetectModerationLabelsOutput, error) {
814	req, out := c.DetectModerationLabelsRequest(input)
815	req.SetContext(ctx)
816	req.ApplyOptions(opts...)
817	return out, req.Send()
818}
819
820const opGetCelebrityInfo = "GetCelebrityInfo"
821
822// GetCelebrityInfoRequest generates a "aws/request.Request" representing the
823// client's request for the GetCelebrityInfo operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
826//
827// Use "Send" method on the returned Request to send the API call to the service.
828// the "output" return value is not valid until after Send returns without error.
829//
830// See GetCelebrityInfo for more information on using the GetCelebrityInfo
831// API call, and error handling.
832//
833// This method is useful when you want to inject custom logic or configuration
834// into the SDK's request lifecycle. Such as custom headers, or retry logic.
835//
836//
837//    // Example sending a request using the GetCelebrityInfoRequest method.
838//    req, resp := client.GetCelebrityInfoRequest(params)
839//
840//    err := req.Send()
841//    if err == nil { // resp is now filled
842//        fmt.Println(resp)
843//    }
844func (c *Rekognition) GetCelebrityInfoRequest(input *GetCelebrityInfoInput) (req *request.Request, output *GetCelebrityInfoOutput) {
845	op := &request.Operation{
846		Name:       opGetCelebrityInfo,
847		HTTPMethod: "POST",
848		HTTPPath:   "/",
849	}
850
851	if input == nil {
852		input = &GetCelebrityInfoInput{}
853	}
854
855	output = &GetCelebrityInfoOutput{}
856	req = c.newRequest(op, input, output)
857	return
858}
859
860// GetCelebrityInfo API operation for Amazon Rekognition.
861//
862// Gets the name and additional information about a celebrity based on his or
863// her Rekognition ID. The additional information is returned as an array of
864// URLs. If there is no additional information about the celebrity, this list
865// is empty. For more information, see celebrity-recognition.
866//
867// This operation requires permissions to perform the rekognition:GetCelebrityInfo
868// action.
869//
870// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
871// with awserr.Error's Code and Message methods to get detailed information about
872// the error.
873//
874// See the AWS API reference guide for Amazon Rekognition's
875// API operation GetCelebrityInfo for usage and error information.
876//
877// Returned Error Codes:
878//   * ErrCodeInvalidParameterException "InvalidParameterException"
879//   Input parameter violated a constraint. Validate your parameter before calling
880//   the API operation again.
881//
882//   * ErrCodeAccessDeniedException "AccessDeniedException"
883//   You are not authorized to perform the action.
884//
885//   * ErrCodeInternalServerError "InternalServerError"
886//   Amazon Rekognition experienced a service issue. Try your call again.
887//
888//   * ErrCodeThrottlingException "ThrottlingException"
889//   Amazon Rekognition is temporarily unable to process the request. Try your
890//   call again.
891//
892//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
893//   The number of requests exceeded your throughput limit. If you want to increase
894//   this limit, contact Amazon Rekognition.
895//
896//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
897//   Collection specified in the request is not found.
898//
899func (c *Rekognition) GetCelebrityInfo(input *GetCelebrityInfoInput) (*GetCelebrityInfoOutput, error) {
900	req, out := c.GetCelebrityInfoRequest(input)
901	return out, req.Send()
902}
903
904// GetCelebrityInfoWithContext is the same as GetCelebrityInfo with the addition of
905// the ability to pass a context and additional request options.
906//
907// See GetCelebrityInfo for details on how to use this API operation.
908//
909// The context must be non-nil and will be used for request cancellation. If
910// the context is nil a panic will occur. In the future the SDK may create
911// sub-contexts for http.Requests. See https://golang.org/pkg/context/
912// for more information on using Contexts.
913func (c *Rekognition) GetCelebrityInfoWithContext(ctx aws.Context, input *GetCelebrityInfoInput, opts ...request.Option) (*GetCelebrityInfoOutput, error) {
914	req, out := c.GetCelebrityInfoRequest(input)
915	req.SetContext(ctx)
916	req.ApplyOptions(opts...)
917	return out, req.Send()
918}
919
920const opIndexFaces = "IndexFaces"
921
922// IndexFacesRequest generates a "aws/request.Request" representing the
923// client's request for the IndexFaces operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
926//
927// Use "Send" method on the returned Request to send the API call to the service.
928// the "output" return value is not valid until after Send returns without error.
929//
930// See IndexFaces for more information on using the IndexFaces
931// API call, and error handling.
932//
933// This method is useful when you want to inject custom logic or configuration
934// into the SDK's request lifecycle. Such as custom headers, or retry logic.
935//
936//
937//    // Example sending a request using the IndexFacesRequest method.
938//    req, resp := client.IndexFacesRequest(params)
939//
940//    err := req.Send()
941//    if err == nil { // resp is now filled
942//        fmt.Println(resp)
943//    }
944func (c *Rekognition) IndexFacesRequest(input *IndexFacesInput) (req *request.Request, output *IndexFacesOutput) {
945	op := &request.Operation{
946		Name:       opIndexFaces,
947		HTTPMethod: "POST",
948		HTTPPath:   "/",
949	}
950
951	if input == nil {
952		input = &IndexFacesInput{}
953	}
954
955	output = &IndexFacesOutput{}
956	req = c.newRequest(op, input, output)
957	return
958}
959
960// IndexFaces API operation for Amazon Rekognition.
961//
962// Detects faces in the input image and adds them to the specified collection.
963//
964// Amazon Rekognition does not save the actual faces detected. Instead, the
965// underlying detection algorithm first detects the faces in the input image,
966// and for each face extracts facial features into a feature vector, and stores
967// it in the back-end database. Amazon Rekognition uses feature vectors when
968// performing face match and search operations using the and operations.
969//
970// If you provide the optional externalImageID for the input image you provided,
971// Amazon Rekognition associates this ID with all faces that it detects. When
972// you call the operation, the response returns the external ID. You can use
973// this external image ID to create a client-side index to associate the faces
974// with each image. You can then use the index to find all faces in an image.
975//
976// In response, the operation returns an array of metadata for all detected
// faces. This includes the bounding box of the detected face, confidence value
978// (indicating the bounding box contains a face), a face ID assigned by the
979// service for each face that is detected and stored, and an image ID assigned
980// by the service for the input image. If you request all facial attributes
// (using the detectionAttributes parameter), Amazon Rekognition returns detailed
// facial attributes such as facial landmarks (for example, location of eye
// and mouth) and other facial attributes such as gender. If you provide the same
984// image, specify the same collection, and use the same external ID in the IndexFaces
985// operation, Amazon Rekognition doesn't save duplicate face metadata.
986//
987// For an example, see example2.
988//
989// This operation requires permissions to perform the rekognition:IndexFaces
990// action.
991//
992// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
993// with awserr.Error's Code and Message methods to get detailed information about
994// the error.
995//
996// See the AWS API reference guide for Amazon Rekognition's
997// API operation IndexFaces for usage and error information.
998//
999// Returned Error Codes:
1000//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
1001//   Amazon Rekognition is unable to access the S3 object specified in the request.
1002//
1003//   * ErrCodeInvalidParameterException "InvalidParameterException"
1004//   Input parameter violated a constraint. Validate your parameter before calling
1005//   the API operation again.
1006//
1007//   * ErrCodeImageTooLargeException "ImageTooLargeException"
1008//   The input image size exceeds the allowed limit. For more information, see
1009//   limits.
1010//
1011//   * ErrCodeAccessDeniedException "AccessDeniedException"
1012//   You are not authorized to perform the action.
1013//
1014//   * ErrCodeInternalServerError "InternalServerError"
1015//   Amazon Rekognition experienced a service issue. Try your call again.
1016//
1017//   * ErrCodeThrottlingException "ThrottlingException"
1018//   Amazon Rekognition is temporarily unable to process the request. Try your
1019//   call again.
1020//
1021//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
1022//   The number of requests exceeded your throughput limit. If you want to increase
1023//   this limit, contact Amazon Rekognition.
1024//
1025//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
1026//   Collection specified in the request is not found.
1027//
1028//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
1029//   The provided image format is not supported.
1030//
1031func (c *Rekognition) IndexFaces(input *IndexFacesInput) (*IndexFacesOutput, error) {
1032	req, out := c.IndexFacesRequest(input)
1033	return out, req.Send()
1034}
1035
1036// IndexFacesWithContext is the same as IndexFaces with the addition of
1037// the ability to pass a context and additional request options.
1038//
1039// See IndexFaces for details on how to use this API operation.
1040//
1041// The context must be non-nil and will be used for request cancellation. If
1042// the context is nil a panic will occur. In the future the SDK may create
1043// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1044// for more information on using Contexts.
1045func (c *Rekognition) IndexFacesWithContext(ctx aws.Context, input *IndexFacesInput, opts ...request.Option) (*IndexFacesOutput, error) {
1046	req, out := c.IndexFacesRequest(input)
1047	req.SetContext(ctx)
1048	req.ApplyOptions(opts...)
1049	return out, req.Send()
1050}
1051
1052const opListCollections = "ListCollections"
1053
1054// ListCollectionsRequest generates a "aws/request.Request" representing the
1055// client's request for the ListCollections operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
1058//
1059// Use "Send" method on the returned Request to send the API call to the service.
1060// the "output" return value is not valid until after Send returns without error.
1061//
1062// See ListCollections for more information on using the ListCollections
1063// API call, and error handling.
1064//
1065// This method is useful when you want to inject custom logic or configuration
1066// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1067//
1068//
1069//    // Example sending a request using the ListCollectionsRequest method.
1070//    req, resp := client.ListCollectionsRequest(params)
1071//
1072//    err := req.Send()
1073//    if err == nil { // resp is now filled
1074//        fmt.Println(resp)
1075//    }
1076func (c *Rekognition) ListCollectionsRequest(input *ListCollectionsInput) (req *request.Request, output *ListCollectionsOutput) {
1077	op := &request.Operation{
1078		Name:       opListCollections,
1079		HTTPMethod: "POST",
1080		HTTPPath:   "/",
1081		Paginator: &request.Paginator{
1082			InputTokens:     []string{"NextToken"},
1083			OutputTokens:    []string{"NextToken"},
1084			LimitToken:      "MaxResults",
1085			TruncationToken: "",
1086		},
1087	}
1088
1089	if input == nil {
1090		input = &ListCollectionsInput{}
1091	}
1092
1093	output = &ListCollectionsOutput{}
1094	req = c.newRequest(op, input, output)
1095	return
1096}
1097
1098// ListCollections API operation for Amazon Rekognition.
1099//
1100// Returns list of collection IDs in your account. If the result is truncated,
1101// the response also provides a NextToken that you can use in the subsequent
1102// request to fetch the next set of collection IDs.
1103//
1104// For an example, see example1.
1105//
1106// This operation requires permissions to perform the rekognition:ListCollections
1107// action.
1108//
1109// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1110// with awserr.Error's Code and Message methods to get detailed information about
1111// the error.
1112//
1113// See the AWS API reference guide for Amazon Rekognition's
1114// API operation ListCollections for usage and error information.
1115//
1116// Returned Error Codes:
1117//   * ErrCodeInvalidParameterException "InvalidParameterException"
1118//   Input parameter violated a constraint. Validate your parameter before calling
1119//   the API operation again.
1120//
1121//   * ErrCodeAccessDeniedException "AccessDeniedException"
1122//   You are not authorized to perform the action.
1123//
1124//   * ErrCodeInternalServerError "InternalServerError"
1125//   Amazon Rekognition experienced a service issue. Try your call again.
1126//
1127//   * ErrCodeThrottlingException "ThrottlingException"
1128//   Amazon Rekognition is temporarily unable to process the request. Try your
1129//   call again.
1130//
1131//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
1132//   The number of requests exceeded your throughput limit. If you want to increase
1133//   this limit, contact Amazon Rekognition.
1134//
1135//   * ErrCodeInvalidPaginationTokenException "InvalidPaginationTokenException"
1136//   Pagination token in the request is not valid.
1137//
1138//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
1139//   Collection specified in the request is not found.
1140//
1141func (c *Rekognition) ListCollections(input *ListCollectionsInput) (*ListCollectionsOutput, error) {
1142	req, out := c.ListCollectionsRequest(input)
1143	return out, req.Send()
1144}
1145
1146// ListCollectionsWithContext is the same as ListCollections with the addition of
1147// the ability to pass a context and additional request options.
1148//
1149// See ListCollections for details on how to use this API operation.
1150//
1151// The context must be non-nil and will be used for request cancellation. If
1152// the context is nil a panic will occur. In the future the SDK may create
1153// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1154// for more information on using Contexts.
1155func (c *Rekognition) ListCollectionsWithContext(ctx aws.Context, input *ListCollectionsInput, opts ...request.Option) (*ListCollectionsOutput, error) {
1156	req, out := c.ListCollectionsRequest(input)
1157	req.SetContext(ctx)
1158	req.ApplyOptions(opts...)
1159	return out, req.Send()
1160}
1161
1162// ListCollectionsPages iterates over the pages of a ListCollections operation,
1163// calling the "fn" function with the response data for each page. To stop
1164// iterating, return false from the fn function.
1165//
1166// See ListCollections method for more information on how to use this operation.
1167//
1168// Note: This operation can generate multiple requests to a service.
1169//
1170//    // Example iterating over at most 3 pages of a ListCollections operation.
1171//    pageNum := 0
1172//    err := client.ListCollectionsPages(params,
1173//        func(page *ListCollectionsOutput, lastPage bool) bool {
1174//            pageNum++
1175//            fmt.Println(page)
1176//            return pageNum <= 3
1177//        })
1178//
func (c *Rekognition) ListCollectionsPages(input *ListCollectionsInput, fn func(*ListCollectionsOutput, bool) bool) error {
	// Delegate to the context-aware variant using the SDK background context.
	return c.ListCollectionsPagesWithContext(aws.BackgroundContext(), input, fn)
}
1182
1183// ListCollectionsPagesWithContext same as ListCollectionsPages except
1184// it takes a Context and allows setting request options on the pages.
1185//
1186// The context must be non-nil and will be used for request cancellation. If
1187// the context is nil a panic will occur. In the future the SDK may create
1188// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1189// for more information on using Contexts.
1190func (c *Rekognition) ListCollectionsPagesWithContext(ctx aws.Context, input *ListCollectionsInput, fn func(*ListCollectionsOutput, bool) bool, opts ...request.Option) error {
1191	p := request.Pagination{
1192		NewRequest: func() (*request.Request, error) {
1193			var inCpy *ListCollectionsInput
1194			if input != nil {
1195				tmp := *input
1196				inCpy = &tmp
1197			}
1198			req, _ := c.ListCollectionsRequest(inCpy)
1199			req.SetContext(ctx)
1200			req.ApplyOptions(opts...)
1201			return req, nil
1202		},
1203	}
1204
1205	cont := true
1206	for p.Next() && cont {
1207		cont = fn(p.Page().(*ListCollectionsOutput), !p.HasNextPage())
1208	}
1209	return p.Err()
1210}
1211
1212const opListFaces = "ListFaces"
1213
1214// ListFacesRequest generates a "aws/request.Request" representing the
1215// client's request for the ListFaces operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
1218//
1219// Use "Send" method on the returned Request to send the API call to the service.
1220// the "output" return value is not valid until after Send returns without error.
1221//
1222// See ListFaces for more information on using the ListFaces
1223// API call, and error handling.
1224//
1225// This method is useful when you want to inject custom logic or configuration
1226// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1227//
1228//
1229//    // Example sending a request using the ListFacesRequest method.
1230//    req, resp := client.ListFacesRequest(params)
1231//
1232//    err := req.Send()
1233//    if err == nil { // resp is now filled
1234//        fmt.Println(resp)
1235//    }
1236func (c *Rekognition) ListFacesRequest(input *ListFacesInput) (req *request.Request, output *ListFacesOutput) {
1237	op := &request.Operation{
1238		Name:       opListFaces,
1239		HTTPMethod: "POST",
1240		HTTPPath:   "/",
1241		Paginator: &request.Paginator{
1242			InputTokens:     []string{"NextToken"},
1243			OutputTokens:    []string{"NextToken"},
1244			LimitToken:      "MaxResults",
1245			TruncationToken: "",
1246		},
1247	}
1248
1249	if input == nil {
1250		input = &ListFacesInput{}
1251	}
1252
1253	output = &ListFacesOutput{}
1254	req = c.newRequest(op, input, output)
1255	return
1256}
1257
1258// ListFaces API operation for Amazon Rekognition.
1259//
1260// Returns metadata for faces in the specified collection. This metadata includes
1261// information such as the bounding box coordinates, the confidence (that the
1262// bounding box contains a face), and face ID. For an example, see example3.
1263//
1264// This operation requires permissions to perform the rekognition:ListFaces
1265// action.
1266//
1267// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1268// with awserr.Error's Code and Message methods to get detailed information about
1269// the error.
1270//
1271// See the AWS API reference guide for Amazon Rekognition's
1272// API operation ListFaces for usage and error information.
1273//
1274// Returned Error Codes:
1275//   * ErrCodeInvalidParameterException "InvalidParameterException"
1276//   Input parameter violated a constraint. Validate your parameter before calling
1277//   the API operation again.
1278//
1279//   * ErrCodeAccessDeniedException "AccessDeniedException"
1280//   You are not authorized to perform the action.
1281//
1282//   * ErrCodeInternalServerError "InternalServerError"
1283//   Amazon Rekognition experienced a service issue. Try your call again.
1284//
1285//   * ErrCodeThrottlingException "ThrottlingException"
1286//   Amazon Rekognition is temporarily unable to process the request. Try your
1287//   call again.
1288//
1289//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
1290//   The number of requests exceeded your throughput limit. If you want to increase
1291//   this limit, contact Amazon Rekognition.
1292//
1293//   * ErrCodeInvalidPaginationTokenException "InvalidPaginationTokenException"
1294//   Pagination token in the request is not valid.
1295//
1296//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
1297//   Collection specified in the request is not found.
1298//
1299func (c *Rekognition) ListFaces(input *ListFacesInput) (*ListFacesOutput, error) {
1300	req, out := c.ListFacesRequest(input)
1301	return out, req.Send()
1302}
1303
1304// ListFacesWithContext is the same as ListFaces with the addition of
1305// the ability to pass a context and additional request options.
1306//
1307// See ListFaces for details on how to use this API operation.
1308//
1309// The context must be non-nil and will be used for request cancellation. If
1310// the context is nil a panic will occur. In the future the SDK may create
1311// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1312// for more information on using Contexts.
1313func (c *Rekognition) ListFacesWithContext(ctx aws.Context, input *ListFacesInput, opts ...request.Option) (*ListFacesOutput, error) {
1314	req, out := c.ListFacesRequest(input)
1315	req.SetContext(ctx)
1316	req.ApplyOptions(opts...)
1317	return out, req.Send()
1318}
1319
1320// ListFacesPages iterates over the pages of a ListFaces operation,
1321// calling the "fn" function with the response data for each page. To stop
1322// iterating, return false from the fn function.
1323//
1324// See ListFaces method for more information on how to use this operation.
1325//
1326// Note: This operation can generate multiple requests to a service.
1327//
1328//    // Example iterating over at most 3 pages of a ListFaces operation.
1329//    pageNum := 0
1330//    err := client.ListFacesPages(params,
1331//        func(page *ListFacesOutput, lastPage bool) bool {
1332//            pageNum++
1333//            fmt.Println(page)
1334//            return pageNum <= 3
1335//        })
1336//
func (c *Rekognition) ListFacesPages(input *ListFacesInput, fn func(*ListFacesOutput, bool) bool) error {
	// Delegate to the context-aware variant using the SDK background context.
	return c.ListFacesPagesWithContext(aws.BackgroundContext(), input, fn)
}
1340
1341// ListFacesPagesWithContext same as ListFacesPages except
1342// it takes a Context and allows setting request options on the pages.
1343//
1344// The context must be non-nil and will be used for request cancellation. If
1345// the context is nil a panic will occur. In the future the SDK may create
1346// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1347// for more information on using Contexts.
1348func (c *Rekognition) ListFacesPagesWithContext(ctx aws.Context, input *ListFacesInput, fn func(*ListFacesOutput, bool) bool, opts ...request.Option) error {
1349	p := request.Pagination{
1350		NewRequest: func() (*request.Request, error) {
1351			var inCpy *ListFacesInput
1352			if input != nil {
1353				tmp := *input
1354				inCpy = &tmp
1355			}
1356			req, _ := c.ListFacesRequest(inCpy)
1357			req.SetContext(ctx)
1358			req.ApplyOptions(opts...)
1359			return req, nil
1360		},
1361	}
1362
1363	cont := true
1364	for p.Next() && cont {
1365		cont = fn(p.Page().(*ListFacesOutput), !p.HasNextPage())
1366	}
1367	return p.Err()
1368}
1369
1370const opRecognizeCelebrities = "RecognizeCelebrities"
1371
1372// RecognizeCelebritiesRequest generates a "aws/request.Request" representing the
1373// client's request for the RecognizeCelebrities operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
1376//
1377// Use "Send" method on the returned Request to send the API call to the service.
1378// the "output" return value is not valid until after Send returns without error.
1379//
1380// See RecognizeCelebrities for more information on using the RecognizeCelebrities
1381// API call, and error handling.
1382//
1383// This method is useful when you want to inject custom logic or configuration
1384// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1385//
1386//
1387//    // Example sending a request using the RecognizeCelebritiesRequest method.
1388//    req, resp := client.RecognizeCelebritiesRequest(params)
1389//
1390//    err := req.Send()
1391//    if err == nil { // resp is now filled
1392//        fmt.Println(resp)
1393//    }
1394func (c *Rekognition) RecognizeCelebritiesRequest(input *RecognizeCelebritiesInput) (req *request.Request, output *RecognizeCelebritiesOutput) {
1395	op := &request.Operation{
1396		Name:       opRecognizeCelebrities,
1397		HTTPMethod: "POST",
1398		HTTPPath:   "/",
1399	}
1400
1401	if input == nil {
1402		input = &RecognizeCelebritiesInput{}
1403	}
1404
1405	output = &RecognizeCelebritiesOutput{}
1406	req = c.newRequest(op, input, output)
1407	return
1408}
1409
1410// RecognizeCelebrities API operation for Amazon Rekognition.
1411//
1412// Returns an array of celebrities recognized in the input image. The image
1413// is passed either as base64-encoded image bytes or as a reference to an image
1414// in an Amazon S3 bucket. The image must be either a PNG or JPEG formatted
1415// file. For more information, see celebrity-recognition.
1416//
1417// RecognizeCelebrities returns the 15 largest faces in the image. It lists
1418// recognized celebrities in the CelebrityFaces list and unrecognized faces
1419// in the UnrecognizedFaces list. The operation doesn't return celebrities whose
1420// face sizes are smaller than the largest 15 faces in the image.
1421//
1422// For each celebrity recognized, the API returns a Celebrity object. The Celebrity
1423// object contains the celebrity name, ID, URL links to additional information,
1424// match confidence, and a ComparedFace object that you can use to locate the
1425// celebrity's face on the image.
1426//
1427// Rekognition does not retain information about which images a celebrity has
1428// been recognized in. Your application must store this information and use
1429// the Celebrity ID property as a unique identifier for the celebrity. If you
1430// don't store the celebrity name or additional information URLs returned by
1431// RecognizeCelebrities, you will need the ID to identify the celebrity in a
1432// call to the operation.
1433//
1434// For an example, see recognize-celebrities-tutorial.
1435//
1436// This operation requires permissions to perform the rekognition:RecognizeCelebrities
1437// operation.
1438//
1439// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1440// with awserr.Error's Code and Message methods to get detailed information about
1441// the error.
1442//
1443// See the AWS API reference guide for Amazon Rekognition's
1444// API operation RecognizeCelebrities for usage and error information.
1445//
1446// Returned Error Codes:
1447//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
1448//   Amazon Rekognition is unable to access the S3 object specified in the request.
1449//
1450//   * ErrCodeInvalidParameterException "InvalidParameterException"
1451//   Input parameter violated a constraint. Validate your parameter before calling
1452//   the API operation again.
1453//
1454//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
1455//   The provided image format is not supported.
1456//
1457//   * ErrCodeImageTooLargeException "ImageTooLargeException"
1458//   The input image size exceeds the allowed limit. For more information, see
1459//   limits.
1460//
1461//   * ErrCodeAccessDeniedException "AccessDeniedException"
1462//   You are not authorized to perform the action.
1463//
1464//   * ErrCodeInternalServerError "InternalServerError"
1465//   Amazon Rekognition experienced a service issue. Try your call again.
1466//
1467//   * ErrCodeThrottlingException "ThrottlingException"
1468//   Amazon Rekognition is temporarily unable to process the request. Try your
1469//   call again.
1470//
//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
//   The number of requests exceeded your throughput limit. If you want to increase
//   this limit, contact Amazon Rekognition.
//
1478func (c *Rekognition) RecognizeCelebrities(input *RecognizeCelebritiesInput) (*RecognizeCelebritiesOutput, error) {
1479	req, out := c.RecognizeCelebritiesRequest(input)
1480	return out, req.Send()
1481}
1482
1483// RecognizeCelebritiesWithContext is the same as RecognizeCelebrities with the addition of
1484// the ability to pass a context and additional request options.
1485//
1486// See RecognizeCelebrities for details on how to use this API operation.
1487//
1488// The context must be non-nil and will be used for request cancellation. If
1489// the context is nil a panic will occur. In the future the SDK may create
1490// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1491// for more information on using Contexts.
1492func (c *Rekognition) RecognizeCelebritiesWithContext(ctx aws.Context, input *RecognizeCelebritiesInput, opts ...request.Option) (*RecognizeCelebritiesOutput, error) {
1493	req, out := c.RecognizeCelebritiesRequest(input)
1494	req.SetContext(ctx)
1495	req.ApplyOptions(opts...)
1496	return out, req.Send()
1497}
1498
1499const opSearchFaces = "SearchFaces"
1500
1501// SearchFacesRequest generates a "aws/request.Request" representing the
1502// client's request for the SearchFaces operation. The "output" return
1503// value will be populated with the request's response once the request complets
1504// successfuly.
1505//
1506// Use "Send" method on the returned Request to send the API call to the service.
1507// the "output" return value is not valid until after Send returns without error.
1508//
1509// See SearchFaces for more information on using the SearchFaces
1510// API call, and error handling.
1511//
1512// This method is useful when you want to inject custom logic or configuration
1513// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1514//
1515//
1516//    // Example sending a request using the SearchFacesRequest method.
1517//    req, resp := client.SearchFacesRequest(params)
1518//
1519//    err := req.Send()
1520//    if err == nil { // resp is now filled
1521//        fmt.Println(resp)
1522//    }
1523func (c *Rekognition) SearchFacesRequest(input *SearchFacesInput) (req *request.Request, output *SearchFacesOutput) {
1524	op := &request.Operation{
1525		Name:       opSearchFaces,
1526		HTTPMethod: "POST",
1527		HTTPPath:   "/",
1528	}
1529
1530	if input == nil {
1531		input = &SearchFacesInput{}
1532	}
1533
1534	output = &SearchFacesOutput{}
1535	req = c.newRequest(op, input, output)
1536	return
1537}
1538
1539// SearchFaces API operation for Amazon Rekognition.
1540//
1541// For a given input face ID, searches for matching faces in the collection
1542// the face belongs to. You get a face ID when you add a face to the collection
1543// using the IndexFaces operation. The operation compares the features of the
1544// input face with faces in the specified collection.
1545//
1546// You can also search faces without indexing faces by using the SearchFacesByImage
1547// operation.
1548//
1549// The operation response returns an array of faces that match, ordered by similarity
1550// score with the highest similarity first. More specifically, it is an array
1551// of metadata for each face match that is found. Along with the metadata, the
1552// response also includes a confidence value for each face match, indicating
1553// the confidence that the specific face matches the input face.
1554//
1555// For an example, see example3.
1556//
1557// This operation requires permissions to perform the rekognition:SearchFaces
1558// action.
1559//
1560// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1561// with awserr.Error's Code and Message methods to get detailed information about
1562// the error.
1563//
1564// See the AWS API reference guide for Amazon Rekognition's
1565// API operation SearchFaces for usage and error information.
1566//
1567// Returned Error Codes:
1568//   * ErrCodeInvalidParameterException "InvalidParameterException"
1569//   Input parameter violated a constraint. Validate your parameter before calling
1570//   the API operation again.
1571//
1572//   * ErrCodeAccessDeniedException "AccessDeniedException"
1573//   You are not authorized to perform the action.
1574//
1575//   * ErrCodeInternalServerError "InternalServerError"
1576//   Amazon Rekognition experienced a service issue. Try your call again.
1577//
1578//   * ErrCodeThrottlingException "ThrottlingException"
1579//   Amazon Rekognition is temporarily unable to process the request. Try your
1580//   call again.
1581//
1582//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
1583//   The number of requests exceeded your throughput limit. If you want to increase
1584//   this limit, contact Amazon Rekognition.
1585//
1586//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
1587//   Collection specified in the request is not found.
1588//
1589func (c *Rekognition) SearchFaces(input *SearchFacesInput) (*SearchFacesOutput, error) {
1590	req, out := c.SearchFacesRequest(input)
1591	return out, req.Send()
1592}
1593
1594// SearchFacesWithContext is the same as SearchFaces with the addition of
1595// the ability to pass a context and additional request options.
1596//
1597// See SearchFaces for details on how to use this API operation.
1598//
1599// The context must be non-nil and will be used for request cancellation. If
1600// the context is nil a panic will occur. In the future the SDK may create
1601// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1602// for more information on using Contexts.
1603func (c *Rekognition) SearchFacesWithContext(ctx aws.Context, input *SearchFacesInput, opts ...request.Option) (*SearchFacesOutput, error) {
1604	req, out := c.SearchFacesRequest(input)
1605	req.SetContext(ctx)
1606	req.ApplyOptions(opts...)
1607	return out, req.Send()
1608}
1609
1610const opSearchFacesByImage = "SearchFacesByImage"
1611
1612// SearchFacesByImageRequest generates a "aws/request.Request" representing the
1613// client's request for the SearchFacesByImage operation. The "output" return
1614// value will be populated with the request's response once the request complets
1615// successfuly.
1616//
1617// Use "Send" method on the returned Request to send the API call to the service.
1618// the "output" return value is not valid until after Send returns without error.
1619//
1620// See SearchFacesByImage for more information on using the SearchFacesByImage
1621// API call, and error handling.
1622//
1623// This method is useful when you want to inject custom logic or configuration
1624// into the SDK's request lifecycle. Such as custom headers, or retry logic.
1625//
1626//
1627//    // Example sending a request using the SearchFacesByImageRequest method.
1628//    req, resp := client.SearchFacesByImageRequest(params)
1629//
1630//    err := req.Send()
1631//    if err == nil { // resp is now filled
1632//        fmt.Println(resp)
1633//    }
1634func (c *Rekognition) SearchFacesByImageRequest(input *SearchFacesByImageInput) (req *request.Request, output *SearchFacesByImageOutput) {
1635	op := &request.Operation{
1636		Name:       opSearchFacesByImage,
1637		HTTPMethod: "POST",
1638		HTTPPath:   "/",
1639	}
1640
1641	if input == nil {
1642		input = &SearchFacesByImageInput{}
1643	}
1644
1645	output = &SearchFacesByImageOutput{}
1646	req = c.newRequest(op, input, output)
1647	return
1648}
1649
1650// SearchFacesByImage API operation for Amazon Rekognition.
1651//
1652// For a given input image, first detects the largest face in the image, and
1653// then searches the specified collection for matching faces. The operation
1654// compares the features of the input face with faces in the specified collection.
1655//
1656// To search for all faces in an input image, you might first call the operation,
1657// and then use the face IDs returned in subsequent calls to the operation.
1658//
1659//  You can also call the DetectFaces operation and use the bounding boxes in
1660// the response to make face crops, which then you can pass in to the SearchFacesByImage
1661// operation.
1662//
1663// The response returns an array of faces that match, ordered by similarity
1664// score with the highest similarity first. More specifically, it is an array
1665// of metadata for each face match found. Along with the metadata, the response
1666// also includes a similarity indicating how similar the face is to the input
1667// face. In the response, the operation also returns the bounding box (and a
1668// confidence level that the bounding box contains a face) of the face that
1669// Amazon Rekognition used for the input image.
1670//
1671// For an example, see example3.
1672//
1673// This operation requires permissions to perform the rekognition:SearchFacesByImage
1674// action.
1675//
1676// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
1677// with awserr.Error's Code and Message methods to get detailed information about
1678// the error.
1679//
1680// See the AWS API reference guide for Amazon Rekognition's
1681// API operation SearchFacesByImage for usage and error information.
1682//
1683// Returned Error Codes:
1684//   * ErrCodeInvalidS3ObjectException "InvalidS3ObjectException"
1685//   Amazon Rekognition is unable to access the S3 object specified in the request.
1686//
1687//   * ErrCodeInvalidParameterException "InvalidParameterException"
1688//   Input parameter violated a constraint. Validate your parameter before calling
1689//   the API operation again.
1690//
1691//   * ErrCodeImageTooLargeException "ImageTooLargeException"
1692//   The input image size exceeds the allowed limit. For more information, see
1693//   limits.
1694//
1695//   * ErrCodeAccessDeniedException "AccessDeniedException"
1696//   You are not authorized to perform the action.
1697//
1698//   * ErrCodeInternalServerError "InternalServerError"
1699//   Amazon Rekognition experienced a service issue. Try your call again.
1700//
1701//   * ErrCodeThrottlingException "ThrottlingException"
1702//   Amazon Rekognition is temporarily unable to process the request. Try your
1703//   call again.
1704//
1705//   * ErrCodeProvisionedThroughputExceededException "ProvisionedThroughputExceededException"
1706//   The number of requests exceeded your throughput limit. If you want to increase
1707//   this limit, contact Amazon Rekognition.
1708//
1709//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
1710//   Collection specified in the request is not found.
1711//
1712//   * ErrCodeInvalidImageFormatException "InvalidImageFormatException"
1713//   The provided image format is not supported.
1714//
1715func (c *Rekognition) SearchFacesByImage(input *SearchFacesByImageInput) (*SearchFacesByImageOutput, error) {
1716	req, out := c.SearchFacesByImageRequest(input)
1717	return out, req.Send()
1718}
1719
1720// SearchFacesByImageWithContext is the same as SearchFacesByImage with the addition of
1721// the ability to pass a context and additional request options.
1722//
1723// See SearchFacesByImage for details on how to use this API operation.
1724//
1725// The context must be non-nil and will be used for request cancellation. If
1726// the context is nil a panic will occur. In the future the SDK may create
1727// sub-contexts for http.Requests. See https://golang.org/pkg/context/
1728// for more information on using Contexts.
1729func (c *Rekognition) SearchFacesByImageWithContext(ctx aws.Context, input *SearchFacesByImageInput, opts ...request.Option) (*SearchFacesByImageOutput, error) {
1730	req, out := c.SearchFacesByImageRequest(input)
1731	req.SetContext(ctx)
1732	req.ApplyOptions(opts...)
1733	return out, req.Send()
1734}
1735
1736// Structure containing the estimated age range, in years, for a face.
1737//
1738// Rekognition estimates an age-range for faces detected in the input image.
1739// Estimated age ranges can overlap; a face of a 5 year old may have an estimated
1740// range of 4-6 whilst the face of a 6 year old may have an estimated range
1741// of 4-8.
1742type AgeRange struct {
1743	_ struct{} `type:"structure"`
1744
1745	// The highest estimated age.
1746	High *int64 `type:"integer"`
1747
1748	// The lowest estimated age.
1749	Low *int64 `type:"integer"`
1750}
1751
1752// String returns the string representation
1753func (s AgeRange) String() string {
1754	return awsutil.Prettify(s)
1755}
1756
1757// GoString returns the string representation
1758func (s AgeRange) GoString() string {
1759	return s.String()
1760}
1761
1762// SetHigh sets the High field's value.
1763func (s *AgeRange) SetHigh(v int64) *AgeRange {
1764	s.High = &v
1765	return s
1766}
1767
1768// SetLow sets the Low field's value.
1769func (s *AgeRange) SetLow(v int64) *AgeRange {
1770	s.Low = &v
1771	return s
1772}
1773
1774// Indicates whether or not the face has a beard, and the confidence level in
1775// the determination.
1776type Beard struct {
1777	_ struct{} `type:"structure"`
1778
1779	// Level of confidence in the determination.
1780	Confidence *float64 `type:"float"`
1781
1782	// Boolean value that indicates whether the face has beard or not.
1783	Value *bool `type:"boolean"`
1784}
1785
1786// String returns the string representation
1787func (s Beard) String() string {
1788	return awsutil.Prettify(s)
1789}
1790
1791// GoString returns the string representation
1792func (s Beard) GoString() string {
1793	return s.String()
1794}
1795
1796// SetConfidence sets the Confidence field's value.
1797func (s *Beard) SetConfidence(v float64) *Beard {
1798	s.Confidence = &v
1799	return s
1800}
1801
1802// SetValue sets the Value field's value.
1803func (s *Beard) SetValue(v bool) *Beard {
1804	s.Value = &v
1805	return s
1806}
1807
1808// Identifies the bounding box around the object or face. The left (x-coordinate)
1809// and top (y-coordinate) are coordinates representing the top and left sides
1810// of the bounding box. Note that the upper-left corner of the image is the
1811// origin (0,0).
1812//
1813// The top and left values returned are ratios of the overall image size. For
1814// example, if the input image is 700x200 pixels, and the top-left coordinate
1815// of the bounding box is 350x50 pixels, the API returns a left value of 0.5
1816// (350/700) and a top value of 0.25 (50/200).
1817//
1818// The width and height values represent the dimensions of the bounding box
1819// as a ratio of the overall image dimension. For example, if the input image
1820// is 700x200 pixels, and the bounding box width is 70 pixels, the width returned
1821// is 0.1.
1822//
1823// The bounding box coordinates can have negative values. For example, if Amazon
1824// Rekognition is able to detect a face that is at the image edge and is only
1825// partially visible, the service can return coordinates that are outside the
1826// image bounds and, depending on the image edge, you might get negative values
1827// or values greater than 1 for the left or top values.
1828type BoundingBox struct {
1829	_ struct{} `type:"structure"`
1830
1831	// Height of the bounding box as a ratio of the overall image height.
1832	Height *float64 `type:"float"`
1833
1834	// Left coordinate of the bounding box as a ratio of overall image width.
1835	Left *float64 `type:"float"`
1836
1837	// Top coordinate of the bounding box as a ratio of overall image height.
1838	Top *float64 `type:"float"`
1839
1840	// Width of the bounding box as a ratio of the overall image width.
1841	Width *float64 `type:"float"`
1842}
1843
1844// String returns the string representation
1845func (s BoundingBox) String() string {
1846	return awsutil.Prettify(s)
1847}
1848
1849// GoString returns the string representation
1850func (s BoundingBox) GoString() string {
1851	return s.String()
1852}
1853
1854// SetHeight sets the Height field's value.
1855func (s *BoundingBox) SetHeight(v float64) *BoundingBox {
1856	s.Height = &v
1857	return s
1858}
1859
1860// SetLeft sets the Left field's value.
1861func (s *BoundingBox) SetLeft(v float64) *BoundingBox {
1862	s.Left = &v
1863	return s
1864}
1865
1866// SetTop sets the Top field's value.
1867func (s *BoundingBox) SetTop(v float64) *BoundingBox {
1868	s.Top = &v
1869	return s
1870}
1871
1872// SetWidth sets the Width field's value.
1873func (s *BoundingBox) SetWidth(v float64) *BoundingBox {
1874	s.Width = &v
1875	return s
1876}
1877
1878// Provides information about a celebrity recognized by the operation.
1879type Celebrity struct {
1880	_ struct{} `type:"structure"`
1881
1882	// Provides information about the celebrity's face, such as its location on
1883	// the image.
1884	Face *ComparedFace `type:"structure"`
1885
1886	// A unique identifier for the celebrity.
1887	Id *string `type:"string"`
1888
1889	// The confidence, in percentage, that Rekognition has that the recognized face
1890	// is the celebrity.
1891	MatchConfidence *float64 `type:"float"`
1892
1893	// The name of the celebrity.
1894	Name *string `type:"string"`
1895
1896	// An array of URLs pointing to additional information about the celebrity.
1897	// If there is no additional information about the celebrity, this list is empty.
1898	Urls []*string `type:"list"`
1899}
1900
1901// String returns the string representation
1902func (s Celebrity) String() string {
1903	return awsutil.Prettify(s)
1904}
1905
1906// GoString returns the string representation
1907func (s Celebrity) GoString() string {
1908	return s.String()
1909}
1910
1911// SetFace sets the Face field's value.
1912func (s *Celebrity) SetFace(v *ComparedFace) *Celebrity {
1913	s.Face = v
1914	return s
1915}
1916
1917// SetId sets the Id field's value.
1918func (s *Celebrity) SetId(v string) *Celebrity {
1919	s.Id = &v
1920	return s
1921}
1922
1923// SetMatchConfidence sets the MatchConfidence field's value.
1924func (s *Celebrity) SetMatchConfidence(v float64) *Celebrity {
1925	s.MatchConfidence = &v
1926	return s
1927}
1928
1929// SetName sets the Name field's value.
1930func (s *Celebrity) SetName(v string) *Celebrity {
1931	s.Name = &v
1932	return s
1933}
1934
1935// SetUrls sets the Urls field's value.
1936func (s *Celebrity) SetUrls(v []*string) *Celebrity {
1937	s.Urls = v
1938	return s
1939}
1940
1941type CompareFacesInput struct {
1942	_ struct{} `type:"structure"`
1943
1944	// The minimum level of confidence in the face matches that a match must meet
1945	// to be included in the FaceMatches array.
1946	SimilarityThreshold *float64 `type:"float"`
1947
1948	// The source image, either as bytes or as an S3 object.
1949	//
1950	// SourceImage is a required field
1951	SourceImage *Image `type:"structure" required:"true"`
1952
1953	// The target image, either as bytes or as an S3 object.
1954	//
1955	// TargetImage is a required field
1956	TargetImage *Image `type:"structure" required:"true"`
1957}
1958
1959// String returns the string representation
1960func (s CompareFacesInput) String() string {
1961	return awsutil.Prettify(s)
1962}
1963
1964// GoString returns the string representation
1965func (s CompareFacesInput) GoString() string {
1966	return s.String()
1967}
1968
1969// Validate inspects the fields of the type to determine if they are valid.
1970func (s *CompareFacesInput) Validate() error {
1971	invalidParams := request.ErrInvalidParams{Context: "CompareFacesInput"}
1972	if s.SourceImage == nil {
1973		invalidParams.Add(request.NewErrParamRequired("SourceImage"))
1974	}
1975	if s.TargetImage == nil {
1976		invalidParams.Add(request.NewErrParamRequired("TargetImage"))
1977	}
1978	if s.SourceImage != nil {
1979		if err := s.SourceImage.Validate(); err != nil {
1980			invalidParams.AddNested("SourceImage", err.(request.ErrInvalidParams))
1981		}
1982	}
1983	if s.TargetImage != nil {
1984		if err := s.TargetImage.Validate(); err != nil {
1985			invalidParams.AddNested("TargetImage", err.(request.ErrInvalidParams))
1986		}
1987	}
1988
1989	if invalidParams.Len() > 0 {
1990		return invalidParams
1991	}
1992	return nil
1993}
1994
1995// SetSimilarityThreshold sets the SimilarityThreshold field's value.
1996func (s *CompareFacesInput) SetSimilarityThreshold(v float64) *CompareFacesInput {
1997	s.SimilarityThreshold = &v
1998	return s
1999}
2000
2001// SetSourceImage sets the SourceImage field's value.
2002func (s *CompareFacesInput) SetSourceImage(v *Image) *CompareFacesInput {
2003	s.SourceImage = v
2004	return s
2005}
2006
2007// SetTargetImage sets the TargetImage field's value.
2008func (s *CompareFacesInput) SetTargetImage(v *Image) *CompareFacesInput {
2009	s.TargetImage = v
2010	return s
2011}
2012
2013// Provides information about a face in a target image that matches the source
2014// image face analysed by CompareFaces. The Face property contains the bounding
2015// box of the face in the target image. The Similarity property is the confidence
2016// that the source image face matches the face in the bounding box.
2017type CompareFacesMatch struct {
2018	_ struct{} `type:"structure"`
2019
2020	// Provides face metadata (bounding box and confidence that the bounding box
2021	// actually contains a face).
2022	Face *ComparedFace `type:"structure"`
2023
2024	// Level of confidence that the faces match.
2025	Similarity *float64 `type:"float"`
2026}
2027
2028// String returns the string representation
2029func (s CompareFacesMatch) String() string {
2030	return awsutil.Prettify(s)
2031}
2032
2033// GoString returns the string representation
2034func (s CompareFacesMatch) GoString() string {
2035	return s.String()
2036}
2037
2038// SetFace sets the Face field's value.
2039func (s *CompareFacesMatch) SetFace(v *ComparedFace) *CompareFacesMatch {
2040	s.Face = v
2041	return s
2042}
2043
2044// SetSimilarity sets the Similarity field's value.
2045func (s *CompareFacesMatch) SetSimilarity(v float64) *CompareFacesMatch {
2046	s.Similarity = &v
2047	return s
2048}
2049
2050type CompareFacesOutput struct {
2051	_ struct{} `type:"structure"`
2052
2053	// An array of faces in the target image that match the source image face. Each
2054	// CompareFacesMatch object provides the bounding box, the confidence level
2055	// that the bounding box contains a face, and the similarity score for the face
2056	// in the bounding box and the face in the source image.
2057	FaceMatches []*CompareFacesMatch `type:"list"`
2058
2059	// The face in the source image that was used for comparison.
2060	SourceImageFace *ComparedSourceImageFace `type:"structure"`
2061
2062	// The orientation of the source image (counterclockwise direction). If your
2063	// application displays the source image, you can use this value to correct
2064	// image orientation. The bounding box coordinates returned in SourceImageFace
2065	// represent the location of the face before the image orientation is corrected.
2066	//
2067	// If the source image is in .jpeg format, it might contain exchangeable image
2068	// (Exif) metadata that includes the image's orientation. If the Exif metadata
2069	// for the source image populates the orientation field, the value of OrientationCorrection
2070	// is null and the SourceImageFace bounding box coordinates represent the location
2071	// of the face after Exif metadata is used to correct the orientation. Images
2072	// in .png format don't contain Exif metadata.
2073	SourceImageOrientationCorrection *string `type:"string" enum:"OrientationCorrection"`
2074
2075	// The orientation of the target image (in counterclockwise direction). If your
2076	// application displays the target image, you can use this value to correct
2077	// the orientation of the image. The bounding box coordinates returned in FaceMatches
2078	// and UnmatchedFaces represent face locations before the image orientation
2079	// is corrected.
2080	//
2081	// If the target image is in .jpg format, it might contain Exif metadata that
2082	// includes the orientation of the image. If the Exif metadata for the target
2083	// image populates the orientation field, the value of OrientationCorrection
2084	// is null and the bounding box coordinates in FaceMatches and UnmatchedFaces
2085	// represent the location of the face after Exif metadata is used to correct
2086	// the orientation. Images in .png format don't contain Exif metadata.
2087	TargetImageOrientationCorrection *string `type:"string" enum:"OrientationCorrection"`
2088
2089	// An array of faces in the target image that did not match the source image
2090	// face.
2091	UnmatchedFaces []*ComparedFace `type:"list"`
2092}
2093
2094// String returns the string representation
2095func (s CompareFacesOutput) String() string {
2096	return awsutil.Prettify(s)
2097}
2098
2099// GoString returns the string representation
2100func (s CompareFacesOutput) GoString() string {
2101	return s.String()
2102}
2103
2104// SetFaceMatches sets the FaceMatches field's value.
2105func (s *CompareFacesOutput) SetFaceMatches(v []*CompareFacesMatch) *CompareFacesOutput {
2106	s.FaceMatches = v
2107	return s
2108}
2109
2110// SetSourceImageFace sets the SourceImageFace field's value.
2111func (s *CompareFacesOutput) SetSourceImageFace(v *ComparedSourceImageFace) *CompareFacesOutput {
2112	s.SourceImageFace = v
2113	return s
2114}
2115
2116// SetSourceImageOrientationCorrection sets the SourceImageOrientationCorrection field's value.
2117func (s *CompareFacesOutput) SetSourceImageOrientationCorrection(v string) *CompareFacesOutput {
2118	s.SourceImageOrientationCorrection = &v
2119	return s
2120}
2121
2122// SetTargetImageOrientationCorrection sets the TargetImageOrientationCorrection field's value.
2123func (s *CompareFacesOutput) SetTargetImageOrientationCorrection(v string) *CompareFacesOutput {
2124	s.TargetImageOrientationCorrection = &v
2125	return s
2126}
2127
2128// SetUnmatchedFaces sets the UnmatchedFaces field's value.
2129func (s *CompareFacesOutput) SetUnmatchedFaces(v []*ComparedFace) *CompareFacesOutput {
2130	s.UnmatchedFaces = v
2131	return s
2132}
2133
2134// Provides face metadata for target image faces that are analysed by CompareFaces
2135// and RecognizeCelebrities.
2136type ComparedFace struct {
2137	_ struct{} `type:"structure"`
2138
2139	// Bounding box of the face.
2140	BoundingBox *BoundingBox `type:"structure"`
2141
2142	// Level of confidence that what the bounding box contains is a face.
2143	Confidence *float64 `type:"float"`
2144
2145	// An array of facial landmarks.
2146	Landmarks []*Landmark `type:"list"`
2147
2148	// Indicates the pose of the face as determined by its pitch, roll, and yaw.
2149	Pose *Pose `type:"structure"`
2150
2151	// Identifies face image brightness and sharpness.
2152	Quality *ImageQuality `type:"structure"`
2153}
2154
2155// String returns the string representation
2156func (s ComparedFace) String() string {
2157	return awsutil.Prettify(s)
2158}
2159
2160// GoString returns the string representation
2161func (s ComparedFace) GoString() string {
2162	return s.String()
2163}
2164
2165// SetBoundingBox sets the BoundingBox field's value.
2166func (s *ComparedFace) SetBoundingBox(v *BoundingBox) *ComparedFace {
2167	s.BoundingBox = v
2168	return s
2169}
2170
2171// SetConfidence sets the Confidence field's value.
2172func (s *ComparedFace) SetConfidence(v float64) *ComparedFace {
2173	s.Confidence = &v
2174	return s
2175}
2176
2177// SetLandmarks sets the Landmarks field's value.
2178func (s *ComparedFace) SetLandmarks(v []*Landmark) *ComparedFace {
2179	s.Landmarks = v
2180	return s
2181}
2182
2183// SetPose sets the Pose field's value.
2184func (s *ComparedFace) SetPose(v *Pose) *ComparedFace {
2185	s.Pose = v
2186	return s
2187}
2188
2189// SetQuality sets the Quality field's value.
2190func (s *ComparedFace) SetQuality(v *ImageQuality) *ComparedFace {
2191	s.Quality = v
2192	return s
2193}
2194
2195// Type that describes the face Amazon Rekognition chose to compare with the
2196// faces in the target. This contains a bounding box for the selected face and
2197// confidence level that the bounding box contains a face. Note that Amazon
2198// Rekognition selects the largest face in the source image for this comparison.
2199type ComparedSourceImageFace struct {
2200	_ struct{} `type:"structure"`
2201
2202	// Bounding box of the face.
2203	BoundingBox *BoundingBox `type:"structure"`
2204
2205	// Confidence level that the selected bounding box contains a face.
2206	Confidence *float64 `type:"float"`
2207}
2208
2209// String returns the string representation
2210func (s ComparedSourceImageFace) String() string {
2211	return awsutil.Prettify(s)
2212}
2213
2214// GoString returns the string representation
2215func (s ComparedSourceImageFace) GoString() string {
2216	return s.String()
2217}
2218
2219// SetBoundingBox sets the BoundingBox field's value.
2220func (s *ComparedSourceImageFace) SetBoundingBox(v *BoundingBox) *ComparedSourceImageFace {
2221	s.BoundingBox = v
2222	return s
2223}
2224
2225// SetConfidence sets the Confidence field's value.
2226func (s *ComparedSourceImageFace) SetConfidence(v float64) *ComparedSourceImageFace {
2227	s.Confidence = &v
2228	return s
2229}
2230
2231type CreateCollectionInput struct {
2232	_ struct{} `type:"structure"`
2233
2234	// ID for the collection that you are creating.
2235	//
2236	// CollectionId is a required field
2237	CollectionId *string `min:"1" type:"string" required:"true"`
2238}
2239
2240// String returns the string representation
2241func (s CreateCollectionInput) String() string {
2242	return awsutil.Prettify(s)
2243}
2244
2245// GoString returns the string representation
2246func (s CreateCollectionInput) GoString() string {
2247	return s.String()
2248}
2249
2250// Validate inspects the fields of the type to determine if they are valid.
2251func (s *CreateCollectionInput) Validate() error {
2252	invalidParams := request.ErrInvalidParams{Context: "CreateCollectionInput"}
2253	if s.CollectionId == nil {
2254		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
2255	}
2256	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
2257		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
2258	}
2259
2260	if invalidParams.Len() > 0 {
2261		return invalidParams
2262	}
2263	return nil
2264}
2265
2266// SetCollectionId sets the CollectionId field's value.
2267func (s *CreateCollectionInput) SetCollectionId(v string) *CreateCollectionInput {
2268	s.CollectionId = &v
2269	return s
2270}
2271
2272type CreateCollectionOutput struct {
2273	_ struct{} `type:"structure"`
2274
2275	// Amazon Resource Name (ARN) of the collection. You can use this to manage
2276	// permissions on your resources.
2277	CollectionArn *string `type:"string"`
2278
2279	// HTTP status code indicating the result of the operation.
2280	StatusCode *int64 `type:"integer"`
2281}
2282
2283// String returns the string representation
2284func (s CreateCollectionOutput) String() string {
2285	return awsutil.Prettify(s)
2286}
2287
2288// GoString returns the string representation
2289func (s CreateCollectionOutput) GoString() string {
2290	return s.String()
2291}
2292
2293// SetCollectionArn sets the CollectionArn field's value.
2294func (s *CreateCollectionOutput) SetCollectionArn(v string) *CreateCollectionOutput {
2295	s.CollectionArn = &v
2296	return s
2297}
2298
2299// SetStatusCode sets the StatusCode field's value.
2300func (s *CreateCollectionOutput) SetStatusCode(v int64) *CreateCollectionOutput {
2301	s.StatusCode = &v
2302	return s
2303}
2304
2305type DeleteCollectionInput struct {
2306	_ struct{} `type:"structure"`
2307
2308	// ID of the collection to delete.
2309	//
2310	// CollectionId is a required field
2311	CollectionId *string `min:"1" type:"string" required:"true"`
2312}
2313
2314// String returns the string representation
2315func (s DeleteCollectionInput) String() string {
2316	return awsutil.Prettify(s)
2317}
2318
2319// GoString returns the string representation
2320func (s DeleteCollectionInput) GoString() string {
2321	return s.String()
2322}
2323
2324// Validate inspects the fields of the type to determine if they are valid.
2325func (s *DeleteCollectionInput) Validate() error {
2326	invalidParams := request.ErrInvalidParams{Context: "DeleteCollectionInput"}
2327	if s.CollectionId == nil {
2328		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
2329	}
2330	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
2331		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
2332	}
2333
2334	if invalidParams.Len() > 0 {
2335		return invalidParams
2336	}
2337	return nil
2338}
2339
2340// SetCollectionId sets the CollectionId field's value.
2341func (s *DeleteCollectionInput) SetCollectionId(v string) *DeleteCollectionInput {
2342	s.CollectionId = &v
2343	return s
2344}
2345
// DeleteCollectionOutput is the response from the DeleteCollection operation.
type DeleteCollectionOutput struct {
	_ struct{} `type:"structure"`

	// HTTP status code that indicates the result of the operation.
	StatusCode *int64 `type:"integer"`
}

// String returns the string representation
func (s DeleteCollectionOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DeleteCollectionOutput) GoString() string {
	return s.String()
}

// SetStatusCode sets the StatusCode field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DeleteCollectionOutput) SetStatusCode(v int64) *DeleteCollectionOutput {
	s.StatusCode = &v
	return s
}
2368
2369type DeleteFacesInput struct {
2370	_ struct{} `type:"structure"`
2371
2372	// Collection from which to remove the specific faces.
2373	//
2374	// CollectionId is a required field
2375	CollectionId *string `min:"1" type:"string" required:"true"`
2376
2377	// An array of face IDs to delete.
2378	//
2379	// FaceIds is a required field
2380	FaceIds []*string `min:"1" type:"list" required:"true"`
2381}
2382
2383// String returns the string representation
2384func (s DeleteFacesInput) String() string {
2385	return awsutil.Prettify(s)
2386}
2387
2388// GoString returns the string representation
2389func (s DeleteFacesInput) GoString() string {
2390	return s.String()
2391}
2392
2393// Validate inspects the fields of the type to determine if they are valid.
2394func (s *DeleteFacesInput) Validate() error {
2395	invalidParams := request.ErrInvalidParams{Context: "DeleteFacesInput"}
2396	if s.CollectionId == nil {
2397		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
2398	}
2399	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
2400		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
2401	}
2402	if s.FaceIds == nil {
2403		invalidParams.Add(request.NewErrParamRequired("FaceIds"))
2404	}
2405	if s.FaceIds != nil && len(s.FaceIds) < 1 {
2406		invalidParams.Add(request.NewErrParamMinLen("FaceIds", 1))
2407	}
2408
2409	if invalidParams.Len() > 0 {
2410		return invalidParams
2411	}
2412	return nil
2413}
2414
2415// SetCollectionId sets the CollectionId field's value.
2416func (s *DeleteFacesInput) SetCollectionId(v string) *DeleteFacesInput {
2417	s.CollectionId = &v
2418	return s
2419}
2420
2421// SetFaceIds sets the FaceIds field's value.
2422func (s *DeleteFacesInput) SetFaceIds(v []*string) *DeleteFacesInput {
2423	s.FaceIds = v
2424	return s
2425}
2426
// DeleteFacesOutput is the response from the DeleteFaces operation.
type DeleteFacesOutput struct {
	_ struct{} `type:"structure"`

	// An array of strings (face IDs) of the faces that were deleted.
	DeletedFaces []*string `min:"1" type:"list"`
}

// String returns the string representation
func (s DeleteFacesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DeleteFacesOutput) GoString() string {
	return s.String()
}

// SetDeletedFaces sets the DeletedFaces field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DeleteFacesOutput) SetDeletedFaces(v []*string) *DeleteFacesOutput {
	s.DeletedFaces = v
	return s
}
2449
// DetectFacesInput carries the parameters for the DetectFaces operation.
type DetectFacesInput struct {
	_ struct{} `type:"structure"`

	// An array of facial attributes you want to be returned. This can be the default
	// list of attributes or all attributes. If you don't specify a value for Attributes
	// or if you specify ["DEFAULT"], the API returns the following subset of facial
	// attributes: BoundingBox, Confidence, Pose, Quality and Landmarks. If you
	// provide ["ALL"], all facial attributes are returned but the operation will
	// take longer to complete.
	//
	// If you provide both, ["ALL", "DEFAULT"], the service uses a logical AND operator
	// to determine which attributes to return (in this case, all attributes).
	Attributes []*string `type:"list"`

	// The image in which you want to detect faces. You can specify a blob or an
	// S3 object.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`
}

// String returns the string representation
func (s DetectFacesInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectFacesInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *DetectFacesInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "DetectFacesInput"}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	if s.Image != nil {
		// Delegate to the nested Image value's own validation and attach any
		// failures under the "Image" key.
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetAttributes sets the Attributes field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DetectFacesInput) SetAttributes(v []*string) *DetectFacesInput {
	s.Attributes = v
	return s
}

// SetImage sets the Image field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *DetectFacesInput) SetImage(v *Image) *DetectFacesInput {
	s.Image = v
	return s
}
2510
// DetectFacesOutput is the response from the DetectFaces operation.
type DetectFacesOutput struct {
	_ struct{} `type:"structure"`

	// Details of each face found in the image.
	FaceDetails []*FaceDetail `type:"list"`

	// The orientation of the input image (counter-clockwise direction). If your
	// application displays the image, you can use this value to correct image orientation.
	// The bounding box coordinates returned in FaceDetails represent face locations
	// before the image orientation is corrected.
	//
	// If the input image is in .jpeg format, it might contain exchangeable image
	// (Exif) metadata that includes the image's orientation. If so, and the Exif
	// metadata for the input image populates the orientation field, the value of
	// OrientationCorrection is null and the FaceDetails bounding box coordinates
	// represent face locations after Exif metadata is used to correct the image
	// orientation. Images in .png format don't contain Exif metadata.
	OrientationCorrection *string `type:"string" enum:"OrientationCorrection"`
}

// String returns the string representation
func (s DetectFacesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectFacesOutput) GoString() string {
	return s.String()
}

// SetFaceDetails sets the FaceDetails field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DetectFacesOutput) SetFaceDetails(v []*FaceDetail) *DetectFacesOutput {
	s.FaceDetails = v
	return s
}

// SetOrientationCorrection sets the OrientationCorrection field's value. It
// returns the struct pointer so setter calls can be chained.
func (s *DetectFacesOutput) SetOrientationCorrection(v string) *DetectFacesOutput {
	s.OrientationCorrection = &v
	return s
}
2552
// DetectLabelsInput carries the parameters for the DetectLabels operation.
type DetectLabelsInput struct {
	_ struct{} `type:"structure"`

	// The input image. You can provide a blob of image bytes or an S3 object.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`

	// Maximum number of labels you want the service to return in the response.
	// The service returns the specified number of highest confidence labels.
	MaxLabels *int64 `type:"integer"`

	// Specifies the minimum confidence level for the labels to return. Amazon Rekognition
	// doesn't return any labels with confidence lower than this specified value.
	//
	// If MinConfidence is not specified, the operation returns labels with a confidence
	// values greater than or equal to 50 percent.
	MinConfidence *float64 `type:"float"`
}

// String returns the string representation
func (s DetectLabelsInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectLabelsInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *DetectLabelsInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "DetectLabelsInput"}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	if s.Image != nil {
		// Delegate to the nested Image value's own validation and attach any
		// failures under the "Image" key.
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetImage sets the Image field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *DetectLabelsInput) SetImage(v *Image) *DetectLabelsInput {
	s.Image = v
	return s
}

// SetMaxLabels sets the MaxLabels field's value. It returns the struct pointer
// so setter calls can be chained.
func (s *DetectLabelsInput) SetMaxLabels(v int64) *DetectLabelsInput {
	s.MaxLabels = &v
	return s
}

// SetMinConfidence sets the MinConfidence field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DetectLabelsInput) SetMinConfidence(v float64) *DetectLabelsInput {
	s.MinConfidence = &v
	return s
}
2618
// DetectLabelsOutput is the response from the DetectLabels operation.
type DetectLabelsOutput struct {
	_ struct{} `type:"structure"`

	// An array of labels for the real-world objects detected.
	Labels []*Label `type:"list"`

	// The orientation of the input image (counter-clockwise direction). If your
	// application displays the image, you can use this value to correct the orientation.
	// If Amazon Rekognition detects that the input image was rotated (for example,
	// by 90 degrees), it first corrects the orientation before detecting the labels.
	//
	// If the input image Exif metadata populates the orientation field, Amazon
	// Rekognition does not perform orientation correction and the value of OrientationCorrection
	// will be null.
	OrientationCorrection *string `type:"string" enum:"OrientationCorrection"`
}

// String returns the string representation
func (s DetectLabelsOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectLabelsOutput) GoString() string {
	return s.String()
}

// SetLabels sets the Labels field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *DetectLabelsOutput) SetLabels(v []*Label) *DetectLabelsOutput {
	s.Labels = v
	return s
}

// SetOrientationCorrection sets the OrientationCorrection field's value. It
// returns the struct pointer so setter calls can be chained.
func (s *DetectLabelsOutput) SetOrientationCorrection(v string) *DetectLabelsOutput {
	s.OrientationCorrection = &v
	return s
}
2657
// DetectModerationLabelsInput carries the parameters for the
// DetectModerationLabels operation.
type DetectModerationLabelsInput struct {
	_ struct{} `type:"structure"`

	// The input image as bytes or an S3 object.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`

	// Specifies the minimum confidence level for the labels to return. Amazon Rekognition
	// doesn't return any labels with a confidence level lower than this specified
	// value.
	//
	// If you don't specify MinConfidence, the operation returns labels with confidence
	// values greater than or equal to 50 percent.
	MinConfidence *float64 `type:"float"`
}

// String returns the string representation
func (s DetectModerationLabelsInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectModerationLabelsInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *DetectModerationLabelsInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "DetectModerationLabelsInput"}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	if s.Image != nil {
		// Delegate to the nested Image value's own validation and attach any
		// failures under the "Image" key.
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetImage sets the Image field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *DetectModerationLabelsInput) SetImage(v *Image) *DetectModerationLabelsInput {
	s.Image = v
	return s
}

// SetMinConfidence sets the MinConfidence field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *DetectModerationLabelsInput) SetMinConfidence(v float64) *DetectModerationLabelsInput {
	s.MinConfidence = &v
	return s
}
2714
// DetectModerationLabelsOutput is the response from the DetectModerationLabels
// operation.
type DetectModerationLabelsOutput struct {
	_ struct{} `type:"structure"`

	// An array of labels for explicit or suggestive adult content found in the
	// image. The list includes the top-level label and each child label detected
	// in the image. This is useful for filtering specific categories of content.
	ModerationLabels []*ModerationLabel `type:"list"`
}

// String returns the string representation
func (s DetectModerationLabelsOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DetectModerationLabelsOutput) GoString() string {
	return s.String()
}

// SetModerationLabels sets the ModerationLabels field's value. It returns the
// struct pointer so setter calls can be chained.
func (s *DetectModerationLabelsOutput) SetModerationLabels(v []*ModerationLabel) *DetectModerationLabelsOutput {
	s.ModerationLabels = v
	return s
}
2739
2740// The emotions detected on the face, and the confidence level in the determination.
2741// For example, HAPPY, SAD, and ANGRY.
2742type Emotion struct {
2743	_ struct{} `type:"structure"`
2744
2745	// Level of confidence in the determination.
2746	Confidence *float64 `type:"float"`
2747
2748	// Type of emotion detected.
2749	Type *string `type:"string" enum:"EmotionName"`
2750}
2751
2752// String returns the string representation
2753func (s Emotion) String() string {
2754	return awsutil.Prettify(s)
2755}
2756
2757// GoString returns the string representation
2758func (s Emotion) GoString() string {
2759	return s.String()
2760}
2761
2762// SetConfidence sets the Confidence field's value.
2763func (s *Emotion) SetConfidence(v float64) *Emotion {
2764	s.Confidence = &v
2765	return s
2766}
2767
2768// SetType sets the Type field's value.
2769func (s *Emotion) SetType(v string) *Emotion {
2770	s.Type = &v
2771	return s
2772}
2773
2774// Indicates whether or not the eyes on the face are open, and the confidence
2775// level in the determination.
2776type EyeOpen struct {
2777	_ struct{} `type:"structure"`
2778
2779	// Level of confidence in the determination.
2780	Confidence *float64 `type:"float"`
2781
2782	// Boolean value that indicates whether the eyes on the face are open.
2783	Value *bool `type:"boolean"`
2784}
2785
2786// String returns the string representation
2787func (s EyeOpen) String() string {
2788	return awsutil.Prettify(s)
2789}
2790
2791// GoString returns the string representation
2792func (s EyeOpen) GoString() string {
2793	return s.String()
2794}
2795
2796// SetConfidence sets the Confidence field's value.
2797func (s *EyeOpen) SetConfidence(v float64) *EyeOpen {
2798	s.Confidence = &v
2799	return s
2800}
2801
2802// SetValue sets the Value field's value.
2803func (s *EyeOpen) SetValue(v bool) *EyeOpen {
2804	s.Value = &v
2805	return s
2806}
2807
// Indicates whether or not the face is wearing eye glasses, and the confidence
// level in the determination.
type Eyeglasses struct {
	_ struct{} `type:"structure"`

	// Level of confidence in the determination.
	Confidence *float64 `type:"float"`

	// Boolean value that indicates whether the face is wearing eye glasses or not.
	Value *bool `type:"boolean"`
}

// String returns the string representation
func (s Eyeglasses) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Eyeglasses) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *Eyeglasses) SetConfidence(v float64) *Eyeglasses {
	s.Confidence = &v
	return s
}

// SetValue sets the Value field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *Eyeglasses) SetValue(v bool) *Eyeglasses {
	s.Value = &v
	return s
}
2841
// Describes the face properties such as the bounding box, face ID, image ID
// of the input image, and external image ID that you assigned.
type Face struct {
	_ struct{} `type:"structure"`

	// Bounding box of the face.
	BoundingBox *BoundingBox `type:"structure"`

	// Confidence level that the bounding box contains a face (and not a different
	// object such as a tree).
	Confidence *float64 `type:"float"`

	// Identifier that you assign to all the faces in the input image.
	ExternalImageId *string `min:"1" type:"string"`

	// Unique identifier that Amazon Rekognition assigns to the face.
	FaceId *string `type:"string"`

	// Unique identifier that Amazon Rekognition assigns to the input image.
	ImageId *string `type:"string"`
}

// String returns the string representation
func (s Face) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Face) GoString() string {
	return s.String()
}

// The setters below assign one field each and return the struct pointer so
// calls can be chained.

// SetBoundingBox sets the BoundingBox field's value.
func (s *Face) SetBoundingBox(v *BoundingBox) *Face {
	s.BoundingBox = v
	return s
}

// SetConfidence sets the Confidence field's value.
func (s *Face) SetConfidence(v float64) *Face {
	s.Confidence = &v
	return s
}

// SetExternalImageId sets the ExternalImageId field's value.
func (s *Face) SetExternalImageId(v string) *Face {
	s.ExternalImageId = &v
	return s
}

// SetFaceId sets the FaceId field's value.
func (s *Face) SetFaceId(v string) *Face {
	s.FaceId = &v
	return s
}

// SetImageId sets the ImageId field's value.
func (s *Face) SetImageId(v string) *Face {
	s.ImageId = &v
	return s
}
2903
// Structure containing attributes of the face that the algorithm detected.
// All fields are pointers; a field is nil when the corresponding attribute
// was not returned by the service.
type FaceDetail struct {
	_ struct{} `type:"structure"`

	// The estimated age range, in years, for the face. Low represents the lowest
	// estimated age and High represents the highest estimated age.
	AgeRange *AgeRange `type:"structure"`

	// Indicates whether or not the face has a beard, and the confidence level in
	// the determination.
	Beard *Beard `type:"structure"`

	// Bounding box of the face.
	BoundingBox *BoundingBox `type:"structure"`

	// Confidence level that the bounding box contains a face (and not a different
	// object such as a tree).
	Confidence *float64 `type:"float"`

	// The emotions detected on the face, and the confidence level in the determination.
	// For example, HAPPY, SAD, and ANGRY.
	Emotions []*Emotion `type:"list"`

	// Indicates whether or not the face is wearing eye glasses, and the confidence
	// level in the determination.
	Eyeglasses *Eyeglasses `type:"structure"`

	// Indicates whether or not the eyes on the face are open, and the confidence
	// level in the determination.
	EyesOpen *EyeOpen `type:"structure"`

	// Gender of the face and the confidence level in the determination.
	Gender *Gender `type:"structure"`

	// Indicates the location of landmarks on the face.
	Landmarks []*Landmark `type:"list"`

	// Indicates whether or not the mouth on the face is open, and the confidence
	// level in the determination.
	MouthOpen *MouthOpen `type:"structure"`

	// Indicates whether or not the face has a mustache, and the confidence level
	// in the determination.
	Mustache *Mustache `type:"structure"`

	// Indicates the pose of the face as determined by its pitch, roll, and yaw.
	Pose *Pose `type:"structure"`

	// Identifies image brightness and sharpness.
	Quality *ImageQuality `type:"structure"`

	// Indicates whether or not the face is smiling, and the confidence level in
	// the determination.
	Smile *Smile `type:"structure"`

	// Indicates whether or not the face is wearing sunglasses, and the confidence
	// level in the determination.
	Sunglasses *Sunglasses `type:"structure"`
}

// String returns the string representation
func (s FaceDetail) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s FaceDetail) GoString() string {
	return s.String()
}

// The setters below assign one field each and return the struct pointer so
// calls can be chained.

// SetAgeRange sets the AgeRange field's value.
func (s *FaceDetail) SetAgeRange(v *AgeRange) *FaceDetail {
	s.AgeRange = v
	return s
}

// SetBeard sets the Beard field's value.
func (s *FaceDetail) SetBeard(v *Beard) *FaceDetail {
	s.Beard = v
	return s
}

// SetBoundingBox sets the BoundingBox field's value.
func (s *FaceDetail) SetBoundingBox(v *BoundingBox) *FaceDetail {
	s.BoundingBox = v
	return s
}

// SetConfidence sets the Confidence field's value.
func (s *FaceDetail) SetConfidence(v float64) *FaceDetail {
	s.Confidence = &v
	return s
}

// SetEmotions sets the Emotions field's value.
func (s *FaceDetail) SetEmotions(v []*Emotion) *FaceDetail {
	s.Emotions = v
	return s
}

// SetEyeglasses sets the Eyeglasses field's value.
func (s *FaceDetail) SetEyeglasses(v *Eyeglasses) *FaceDetail {
	s.Eyeglasses = v
	return s
}

// SetEyesOpen sets the EyesOpen field's value.
func (s *FaceDetail) SetEyesOpen(v *EyeOpen) *FaceDetail {
	s.EyesOpen = v
	return s
}

// SetGender sets the Gender field's value.
func (s *FaceDetail) SetGender(v *Gender) *FaceDetail {
	s.Gender = v
	return s
}

// SetLandmarks sets the Landmarks field's value.
func (s *FaceDetail) SetLandmarks(v []*Landmark) *FaceDetail {
	s.Landmarks = v
	return s
}

// SetMouthOpen sets the MouthOpen field's value.
func (s *FaceDetail) SetMouthOpen(v *MouthOpen) *FaceDetail {
	s.MouthOpen = v
	return s
}

// SetMustache sets the Mustache field's value.
func (s *FaceDetail) SetMustache(v *Mustache) *FaceDetail {
	s.Mustache = v
	return s
}

// SetPose sets the Pose field's value.
func (s *FaceDetail) SetPose(v *Pose) *FaceDetail {
	s.Pose = v
	return s
}

// SetQuality sets the Quality field's value.
func (s *FaceDetail) SetQuality(v *ImageQuality) *FaceDetail {
	s.Quality = v
	return s
}

// SetSmile sets the Smile field's value.
func (s *FaceDetail) SetSmile(v *Smile) *FaceDetail {
	s.Smile = v
	return s
}

// SetSunglasses sets the Sunglasses field's value.
func (s *FaceDetail) SetSunglasses(v *Sunglasses) *FaceDetail {
	s.Sunglasses = v
	return s
}
3063
// Provides face metadata. In addition, it also provides the confidence in the
// match of this face with the input face.
type FaceMatch struct {
	_ struct{} `type:"structure"`

	// Describes the face properties such as the bounding box, face ID, image ID
	// of the source image, and external image ID that you assigned.
	Face *Face `type:"structure"`

	// Confidence in the match of this face with the input face.
	Similarity *float64 `type:"float"`
}

// String returns the string representation
func (s FaceMatch) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s FaceMatch) GoString() string {
	return s.String()
}

// SetFace sets the Face field's value. It returns the struct pointer so setter
// calls can be chained.
func (s *FaceMatch) SetFace(v *Face) *FaceMatch {
	s.Face = v
	return s
}

// SetSimilarity sets the Similarity field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *FaceMatch) SetSimilarity(v float64) *FaceMatch {
	s.Similarity = &v
	return s
}
3098
// Object containing both the face metadata (stored in the back-end database)
// and facial attributes that are detected but aren't stored in the database.
type FaceRecord struct {
	_ struct{} `type:"structure"`

	// Describes the face properties such as the bounding box, face ID, image ID
	// of the input image, and external image ID that you assigned.
	Face *Face `type:"structure"`

	// Structure containing attributes of the face that the algorithm detected.
	FaceDetail *FaceDetail `type:"structure"`
}

// String returns the string representation
func (s FaceRecord) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s FaceRecord) GoString() string {
	return s.String()
}

// SetFace sets the Face field's value. It returns the struct pointer so setter
// calls can be chained.
func (s *FaceRecord) SetFace(v *Face) *FaceRecord {
	s.Face = v
	return s
}

// SetFaceDetail sets the FaceDetail field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *FaceRecord) SetFaceDetail(v *FaceDetail) *FaceRecord {
	s.FaceDetail = v
	return s
}
3133
// Gender of the face and the confidence level in the determination.
type Gender struct {
	_ struct{} `type:"structure"`

	// Level of confidence in the determination.
	Confidence *float64 `type:"float"`

	// Gender of the face.
	Value *string `type:"string" enum:"GenderType"`
}

// String returns the string representation
func (s Gender) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Gender) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value. It returns the struct
// pointer so setter calls can be chained.
func (s *Gender) SetConfidence(v float64) *Gender {
	s.Confidence = &v
	return s
}

// SetValue sets the Value field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *Gender) SetValue(v string) *Gender {
	s.Value = &v
	return s
}
3166
3167type GetCelebrityInfoInput struct {
3168	_ struct{} `type:"structure"`
3169
3170	// The ID for the celebrity. You get the celebrity ID from a call to the operation,
3171	// which recognizes celebrities in an image.
3172	//
3173	// Id is a required field
3174	Id *string `type:"string" required:"true"`
3175}
3176
3177// String returns the string representation
3178func (s GetCelebrityInfoInput) String() string {
3179	return awsutil.Prettify(s)
3180}
3181
3182// GoString returns the string representation
3183func (s GetCelebrityInfoInput) GoString() string {
3184	return s.String()
3185}
3186
3187// Validate inspects the fields of the type to determine if they are valid.
3188func (s *GetCelebrityInfoInput) Validate() error {
3189	invalidParams := request.ErrInvalidParams{Context: "GetCelebrityInfoInput"}
3190	if s.Id == nil {
3191		invalidParams.Add(request.NewErrParamRequired("Id"))
3192	}
3193
3194	if invalidParams.Len() > 0 {
3195		return invalidParams
3196	}
3197	return nil
3198}
3199
3200// SetId sets the Id field's value.
3201func (s *GetCelebrityInfoInput) SetId(v string) *GetCelebrityInfoInput {
3202	s.Id = &v
3203	return s
3204}
3205
// GetCelebrityInfoOutput is the response from the GetCelebrityInfo operation.
type GetCelebrityInfoOutput struct {
	_ struct{} `type:"structure"`

	// The name of the celebrity.
	Name *string `type:"string"`

	// An array of URLs pointing to additional celebrity information.
	Urls []*string `type:"list"`
}

// String returns the string representation
func (s GetCelebrityInfoOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s GetCelebrityInfoOutput) GoString() string {
	return s.String()
}

// SetName sets the Name field's value. It returns the struct pointer so setter
// calls can be chained.
func (s *GetCelebrityInfoOutput) SetName(v string) *GetCelebrityInfoOutput {
	s.Name = &v
	return s
}

// SetUrls sets the Urls field's value. It returns the struct pointer so setter
// calls can be chained.
func (s *GetCelebrityInfoOutput) SetUrls(v []*string) *GetCelebrityInfoOutput {
	s.Urls = v
	return s
}
3237
// Provides the input image either as bytes or an S3 object.
//
// You pass image bytes to a Rekognition API operation by using the Bytes property.
// For example, you would use the Bytes property to pass an image loaded from
// a local file system. Image bytes passed by using the Bytes property must
// be base64-encoded. Your code may not need to encode image bytes if you are
// using an AWS SDK to call Rekognition API operations. For more information,
// see the examples in the Amazon Rekognition Developer Guide.
//
// You pass images stored in an S3 bucket to a Rekognition API operation by
// using the S3Object property. Images stored in an S3 bucket do not need to
// be base64-encoded.
//
// The region for the S3 bucket containing the S3 object must match the region
// you use for Amazon Rekognition operations.
//
// If you use the Amazon CLI to call Amazon Rekognition operations, passing
// image bytes using the Bytes property is not supported. You must first upload
// the image to an Amazon S3 bucket and then call the operation using the S3Object
// property.
//
// For Amazon Rekognition to process an S3 object, the user must have permission
// to access the S3 object. For more information, see the resource-based policy
// documentation in the Amazon Rekognition Developer Guide.
type Image struct {
	_ struct{} `type:"structure"`

	// Blob of image bytes up to 5 MBs.
	//
	// Bytes is automatically base64 encoded/decoded by the SDK.
	Bytes []byte `min:"1" type:"blob"`

	// Identifies an S3 object as the image source.
	S3Object *S3Object `type:"structure"`
}

// String returns the string representation
func (s Image) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Image) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *Image) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "Image"}
	if s.Bytes != nil && len(s.Bytes) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("Bytes", 1))
	}
	if s.S3Object != nil {
		// Delegate to the nested S3Object's own validation and attach any
		// failures under the "S3Object" key.
		if err := s.S3Object.Validate(); err != nil {
			invalidParams.AddNested("S3Object", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetBytes sets the Bytes field's value. It returns the struct pointer so
// setter calls can be chained.
func (s *Image) SetBytes(v []byte) *Image {
	s.Bytes = v
	return s
}

// SetS3Object sets the S3Object field's value. It returns the struct pointer
// so setter calls can be chained.
func (s *Image) SetS3Object(v *S3Object) *Image {
	s.S3Object = v
	return s
}
3312
// Identifies face image brightness and sharpness.
//
// Both fields are pointers so that a value the service did not return (nil)
// can be distinguished from a legitimate 0.
type ImageQuality struct {
	_ struct{} `type:"structure"`

	// Value representing brightness of the face. The service returns a value between
	// 0 and 100 (inclusive). A higher value indicates a brighter face image.
	Brightness *float64 `type:"float"`

	// Value representing sharpness of the face. The service returns a value between
	// 0 and 100 (inclusive). A higher value indicates a sharper face image.
	Sharpness *float64 `type:"float"`
}

// String returns the string representation
func (s ImageQuality) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ImageQuality) GoString() string {
	return s.String()
}

// SetBrightness sets the Brightness field's value.
func (s *ImageQuality) SetBrightness(v float64) *ImageQuality {
	s.Brightness = &v
	return s
}

// SetSharpness sets the Sharpness field's value.
func (s *ImageQuality) SetSharpness(v float64) *ImageQuality {
	s.Sharpness = &v
	return s
}
3347
// IndexFacesInput carries the parameters for an IndexFaces request: the
// target collection, the input image, and optional detection settings.
type IndexFacesInput struct {
	_ struct{} `type:"structure"`

	// The ID of an existing collection to which you want to add the faces that
	// are detected in the input images.
	//
	// CollectionId is a required field
	CollectionId *string `min:"1" type:"string" required:"true"`

	// An array of facial attributes that you want to be returned. This can be the
	// default list of attributes or all attributes. If you don't specify a value
	// for Attributes or if you specify ["DEFAULT"], the API returns the following
	// subset of facial attributes: BoundingBox, Confidence, Pose, Quality and Landmarks.
	// If you provide ["ALL"], all facial attributes are returned but the operation
	// will take longer to complete.
	//
	// If you provide both, ["ALL", "DEFAULT"], the service uses a logical AND operator
	// to determine which attributes to return (in this case, all attributes).
	DetectionAttributes []*string `type:"list"`

	// ID you want to assign to all the faces detected in the image.
	ExternalImageId *string `min:"1" type:"string"`

	// The input image as bytes or an S3 object.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`
}

// String returns the string representation
func (s IndexFacesInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s IndexFacesInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// It checks the required/min-length constraints from the field tags and
// recurses into the nested Image.
func (s *IndexFacesInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "IndexFacesInput"}
	if s.CollectionId == nil {
		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
	}
	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
	}
	if s.ExternalImageId != nil && len(*s.ExternalImageId) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("ExternalImageId", 1))
	}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	// Nested errors are reported under the "Image" context.
	if s.Image != nil {
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetCollectionId sets the CollectionId field's value.
func (s *IndexFacesInput) SetCollectionId(v string) *IndexFacesInput {
	s.CollectionId = &v
	return s
}

// SetDetectionAttributes sets the DetectionAttributes field's value.
func (s *IndexFacesInput) SetDetectionAttributes(v []*string) *IndexFacesInput {
	s.DetectionAttributes = v
	return s
}

// SetExternalImageId sets the ExternalImageId field's value.
func (s *IndexFacesInput) SetExternalImageId(v string) *IndexFacesInput {
	s.ExternalImageId = &v
	return s
}

// SetImage sets the Image field's value.
func (s *IndexFacesInput) SetImage(v *Image) *IndexFacesInput {
	s.Image = v
	return s
}
3437
// IndexFacesOutput is the response to an IndexFaces request: the faces that
// were detected and added, plus any orientation correction applied.
type IndexFacesOutput struct {
	_ struct{} `type:"structure"`

	// An array of faces detected and added to the collection. For more information,
	// see howitworks-index-faces.
	FaceRecords []*FaceRecord `type:"list"`

	// The orientation of the input image (counterclockwise direction). If your
	// application displays the image, you can use this value to correct image orientation.
	// The bounding box coordinates returned in FaceRecords represent face locations
	// before the image orientation is corrected.
	//
	// If the input image is in jpeg format, it might contain exchangeable image
	// (Exif) metadata. If so, and the Exif metadata populates the orientation field,
	// the value of OrientationCorrection is null and the bounding box coordinates
	// in FaceRecords represent face locations after Exif metadata is used to correct
	// the image orientation. Images in .png format don't contain Exif metadata.
	OrientationCorrection *string `type:"string" enum:"OrientationCorrection"`
}

// String returns the string representation
func (s IndexFacesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s IndexFacesOutput) GoString() string {
	return s.String()
}

// SetFaceRecords sets the FaceRecords field's value.
func (s *IndexFacesOutput) SetFaceRecords(v []*FaceRecord) *IndexFacesOutput {
	s.FaceRecords = v
	return s
}

// SetOrientationCorrection sets the OrientationCorrection field's value.
func (s *IndexFacesOutput) SetOrientationCorrection(v string) *IndexFacesOutput {
	s.OrientationCorrection = &v
	return s
}
3479
// Structure containing details about the detected label, including name, and
// level of confidence.
//
// Fields are pointers so an absent value (nil) can be distinguished from a
// zero value.
type Label struct {
	_ struct{} `type:"structure"`

	// Level of confidence.
	Confidence *float64 `type:"float"`

	// The name (label) of the object.
	Name *string `type:"string"`
}

// String returns the string representation
func (s Label) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Label) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value.
func (s *Label) SetConfidence(v float64) *Label {
	s.Confidence = &v
	return s
}

// SetName sets the Name field's value.
func (s *Label) SetName(v string) *Label {
	s.Name = &v
	return s
}
3513
// Indicates the location of the landmark on the face.
type Landmark struct {
	_ struct{} `type:"structure"`

	// Type of the landmark.
	Type *string `type:"string" enum:"LandmarkType"`

	// x-coordinate from the top left of the landmark expressed as the ratio of
	// the width of the image. For example, if the image is 700x200 and the x-coordinate
	// of the landmark is at 350 pixels, this value is 0.5.
	X *float64 `type:"float"`

	// y-coordinate from the top left of the landmark expressed as the ratio of
	// the height of the image. For example, if the image is 700x200 and the y-coordinate
	// of the landmark is at 100 pixels, this value is 0.5.
	Y *float64 `type:"float"`
}

// String returns the string representation
func (s Landmark) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Landmark) GoString() string {
	return s.String()
}

// SetType sets the Type field's value.
func (s *Landmark) SetType(v string) *Landmark {
	s.Type = &v
	return s
}

// SetX sets the X field's value.
func (s *Landmark) SetX(v float64) *Landmark {
	s.X = &v
	return s
}

// SetY sets the Y field's value.
func (s *Landmark) SetY(v float64) *Landmark {
	s.Y = &v
	return s
}
3559
// ListCollectionsInput carries the optional pagination parameters for a
// ListCollections request. Both fields may be left nil.
type ListCollectionsInput struct {
	_ struct{} `type:"structure"`

	// Maximum number of collection IDs to return.
	MaxResults *int64 `type:"integer"`

	// Pagination token from the previous response.
	NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListCollectionsInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListCollectionsInput) GoString() string {
	return s.String()
}

// SetMaxResults sets the MaxResults field's value.
func (s *ListCollectionsInput) SetMaxResults(v int64) *ListCollectionsInput {
	s.MaxResults = &v
	return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListCollectionsInput) SetNextToken(v string) *ListCollectionsInput {
	s.NextToken = &v
	return s
}
3591
// ListCollectionsOutput is the response to a ListCollections request: a page
// of collection IDs plus a continuation token when more pages remain.
type ListCollectionsOutput struct {
	_ struct{} `type:"structure"`

	// An array of collection IDs.
	CollectionIds []*string `type:"list"`

	// If the result is truncated, the response provides a NextToken that you can
	// use in the subsequent request to fetch the next set of collection IDs.
	NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListCollectionsOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListCollectionsOutput) GoString() string {
	return s.String()
}

// SetCollectionIds sets the CollectionIds field's value.
func (s *ListCollectionsOutput) SetCollectionIds(v []*string) *ListCollectionsOutput {
	s.CollectionIds = v
	return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListCollectionsOutput) SetNextToken(v string) *ListCollectionsOutput {
	s.NextToken = &v
	return s
}
3624
// ListFacesInput carries the parameters for a ListFaces request: the
// collection to list, plus optional pagination controls.
type ListFacesInput struct {
	_ struct{} `type:"structure"`

	// ID of the collection from which to list the faces.
	//
	// CollectionId is a required field
	CollectionId *string `min:"1" type:"string" required:"true"`

	// Maximum number of faces to return.
	MaxResults *int64 `type:"integer"`

	// If the previous response was incomplete (because there is more data to retrieve),
	// Amazon Rekognition returns a pagination token in the response. You can use
	// this pagination token to retrieve the next set of faces.
	NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListFacesInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListFacesInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// Only CollectionId carries client-side constraints (required, min length 1);
// MaxResults and NextToken are not checked here.
func (s *ListFacesInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "ListFacesInput"}
	if s.CollectionId == nil {
		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
	}
	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetCollectionId sets the CollectionId field's value.
func (s *ListFacesInput) SetCollectionId(v string) *ListFacesInput {
	s.CollectionId = &v
	return s
}

// SetMaxResults sets the MaxResults field's value.
func (s *ListFacesInput) SetMaxResults(v int64) *ListFacesInput {
	s.MaxResults = &v
	return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListFacesInput) SetNextToken(v string) *ListFacesInput {
	s.NextToken = &v
	return s
}
3685
// ListFacesOutput is the response to a ListFaces request: a page of Face
// objects plus a continuation token when more pages remain.
type ListFacesOutput struct {
	_ struct{} `type:"structure"`

	// An array of Face objects.
	Faces []*Face `type:"list"`

	// If the response is truncated, Amazon Rekognition returns this token that
	// you can use in the subsequent request to retrieve the next set of faces.
	NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListFacesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListFacesOutput) GoString() string {
	return s.String()
}

// SetFaces sets the Faces field's value.
func (s *ListFacesOutput) SetFaces(v []*Face) *ListFacesOutput {
	s.Faces = v
	return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListFacesOutput) SetNextToken(v string) *ListFacesOutput {
	s.NextToken = &v
	return s
}
3718
// Provides information about a single type of moderated content found in an
// image. Each type of moderated content has a label within a hierarchical taxonomy.
// For more information, see image-moderation.
type ModerationLabel struct {
	_ struct{} `type:"structure"`

	// Specifies the confidence that Amazon Rekognition has that the label has been
	// correctly identified.
	//
	// If you don't specify the MinConfidence parameter in the call to DetectModerationLabels,
	// the operation returns labels with a confidence value greater than or equal
	// to 50 percent.
	//
	// NOTE(review): presumably a percentage in the range 0-100, as with other
	// confidence values in this API — confirm against the service docs.
	Confidence *float64 `type:"float"`

	// The label name for the type of content detected in the image.
	Name *string `type:"string"`

	// The name for the parent label. Labels at the top-level of the hierarchy have
	// the parent label "".
	ParentName *string `type:"string"`
}

// String returns the string representation
func (s ModerationLabel) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ModerationLabel) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value.
func (s *ModerationLabel) SetConfidence(v float64) *ModerationLabel {
	s.Confidence = &v
	return s
}

// SetName sets the Name field's value.
func (s *ModerationLabel) SetName(v string) *ModerationLabel {
	s.Name = &v
	return s
}

// SetParentName sets the ParentName field's value.
func (s *ModerationLabel) SetParentName(v string) *ModerationLabel {
	s.ParentName = &v
	return s
}
3768
// Indicates whether or not the mouth on the face is open, and the confidence
// level in the determination.
//
// Fields are pointers so an absent value (nil) can be distinguished from a
// zero value or false.
type MouthOpen struct {
	_ struct{} `type:"structure"`

	// Level of confidence in the determination.
	Confidence *float64 `type:"float"`

	// Boolean value that indicates whether the mouth on the face is open or not.
	Value *bool `type:"boolean"`
}

// String returns the string representation
func (s MouthOpen) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s MouthOpen) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value.
func (s *MouthOpen) SetConfidence(v float64) *MouthOpen {
	s.Confidence = &v
	return s
}

// SetValue sets the Value field's value.
func (s *MouthOpen) SetValue(v bool) *MouthOpen {
	s.Value = &v
	return s
}
3802
// Indicates whether or not the face has a mustache, and the confidence level
// in the determination.
type Mustache struct {
	_ struct{} `type:"structure"`

	// Level of confidence in the determination.
	Confidence *float64 `type:"float"`

	// Boolean value that indicates whether the face has a mustache or not.
	Value *bool `type:"boolean"`
}

// String returns the string representation
func (s Mustache) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Mustache) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value.
func (s *Mustache) SetConfidence(v float64) *Mustache {
	s.Confidence = &v
	return s
}

// SetValue sets the Value field's value.
func (s *Mustache) SetValue(v bool) *Mustache {
	s.Value = &v
	return s
}
3836
// Indicates the pose of the face as determined by its pitch, roll, and yaw.
//
// NOTE(review): the rotation values appear to be angles (degrees), but the
// unit is not stated here — confirm against the service documentation.
type Pose struct {
	_ struct{} `type:"structure"`

	// Value representing the face rotation on the pitch axis.
	Pitch *float64 `type:"float"`

	// Value representing the face rotation on the roll axis.
	Roll *float64 `type:"float"`

	// Value representing the face rotation on the yaw axis.
	Yaw *float64 `type:"float"`
}

// String returns the string representation
func (s Pose) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Pose) GoString() string {
	return s.String()
}

// SetPitch sets the Pitch field's value.
func (s *Pose) SetPitch(v float64) *Pose {
	s.Pitch = &v
	return s
}

// SetRoll sets the Roll field's value.
func (s *Pose) SetRoll(v float64) *Pose {
	s.Roll = &v
	return s
}

// SetYaw sets the Yaw field's value.
func (s *Pose) SetYaw(v float64) *Pose {
	s.Yaw = &v
	return s
}
3878
// RecognizeCelebritiesInput carries the single required parameter for a
// RecognizeCelebrities request: the input image.
type RecognizeCelebritiesInput struct {
	_ struct{} `type:"structure"`

	// The input image to use for celebrity recognition.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`
}

// String returns the string representation
func (s RecognizeCelebritiesInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s RecognizeCelebritiesInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// Image must be present, and its own constraints must hold.
func (s *RecognizeCelebritiesInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "RecognizeCelebritiesInput"}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	// Nested errors are reported under the "Image" context.
	if s.Image != nil {
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetImage sets the Image field's value.
func (s *RecognizeCelebritiesInput) SetImage(v *Image) *RecognizeCelebritiesInput {
	s.Image = v
	return s
}
3921
// RecognizeCelebritiesOutput is the response to a RecognizeCelebrities
// request: recognized celebrities, unrecognized faces, and any orientation
// correction applied.
type RecognizeCelebritiesOutput struct {
	_ struct{} `type:"structure"`

	// Details about each celebrity found in the image. Amazon Rekognition can detect
	// a maximum of 15 celebrities in an image.
	CelebrityFaces []*Celebrity `type:"list"`

	// The orientation of the input image (counterclockwise direction). If your
	// application displays the image, you can use this value to correct the orientation.
	// The bounding box coordinates returned in CelebrityFaces and UnrecognizedFaces
	// represent face locations before the image orientation is corrected.
	//
	// If the input image is in .jpeg format, it might contain exchangeable image
	// (Exif) metadata that includes the image's orientation. If so, and the Exif
	// metadata for the input image populates the orientation field, the value of
	// OrientationCorrection is null and the CelebrityFaces and UnrecognizedFaces
	// bounding box coordinates represent face locations after Exif metadata is
	// used to correct the image orientation. Images in .png format don't contain
	// Exif metadata.
	OrientationCorrection *string `type:"string" enum:"OrientationCorrection"`

	// Details about each unrecognized face in the image.
	UnrecognizedFaces []*ComparedFace `type:"list"`
}

// String returns the string representation
func (s RecognizeCelebritiesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s RecognizeCelebritiesOutput) GoString() string {
	return s.String()
}

// SetCelebrityFaces sets the CelebrityFaces field's value.
func (s *RecognizeCelebritiesOutput) SetCelebrityFaces(v []*Celebrity) *RecognizeCelebritiesOutput {
	s.CelebrityFaces = v
	return s
}

// SetOrientationCorrection sets the OrientationCorrection field's value.
func (s *RecognizeCelebritiesOutput) SetOrientationCorrection(v string) *RecognizeCelebritiesOutput {
	s.OrientationCorrection = &v
	return s
}

// SetUnrecognizedFaces sets the UnrecognizedFaces field's value.
func (s *RecognizeCelebritiesOutput) SetUnrecognizedFaces(v []*ComparedFace) *RecognizeCelebritiesOutput {
	s.UnrecognizedFaces = v
	return s
}
3974
// Provides the S3 bucket name and object name.
//
// The region for the S3 bucket containing the S3 object must match the region
// you use for Amazon Rekognition operations.
//
// For Amazon Rekognition to process an S3 object, the user must have permission
// to access the S3 object. For more information, see manage-access-resource-policies.
type S3Object struct {
	_ struct{} `type:"structure"`

	// Name of the S3 bucket.
	Bucket *string `min:"3" type:"string"`

	// S3 object key name.
	Name *string `min:"1" type:"string"`

	// If the bucket is versioning-enabled, you can specify the object version.
	Version *string `min:"1" type:"string"`
}

// String returns the string representation
func (s S3Object) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s S3Object) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// All fields are optional here; only minimum lengths are enforced.
func (s *S3Object) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "S3Object"}
	// min:"3" — matches the S3 bucket-name minimum length.
	if s.Bucket != nil && len(*s.Bucket) < 3 {
		invalidParams.Add(request.NewErrParamMinLen("Bucket", 3))
	}
	if s.Name != nil && len(*s.Name) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("Name", 1))
	}
	if s.Version != nil && len(*s.Version) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("Version", 1))
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetBucket sets the Bucket field's value.
func (s *S3Object) SetBucket(v string) *S3Object {
	s.Bucket = &v
	return s
}

// SetName sets the Name field's value.
func (s *S3Object) SetName(v string) *S3Object {
	s.Name = &v
	return s
}

// SetVersion sets the Version field's value.
func (s *S3Object) SetVersion(v string) *S3Object {
	s.Version = &v
	return s
}
4041
// SearchFacesByImageInput carries the parameters for a SearchFacesByImage
// request: the collection to search, the query image, and optional match
// threshold and result-count limits.
type SearchFacesByImageInput struct {
	_ struct{} `type:"structure"`

	// ID of the collection to search.
	//
	// CollectionId is a required field
	CollectionId *string `min:"1" type:"string" required:"true"`

	// (Optional) Specifies the minimum confidence in the face match to return.
	// For example, don't return any matches where confidence in matches is less
	// than 70%.
	FaceMatchThreshold *float64 `type:"float"`

	// The input image as bytes or an S3 object.
	//
	// Image is a required field
	Image *Image `type:"structure" required:"true"`

	// Maximum number of faces to return. The operation returns the maximum number
	// of faces with the highest confidence in the match.
	MaxFaces *int64 `min:"1" type:"integer"`
}

// String returns the string representation
func (s SearchFacesByImageInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SearchFacesByImageInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// It checks the required/min constraints from the field tags and recurses
// into the nested Image.
func (s *SearchFacesByImageInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "SearchFacesByImageInput"}
	if s.CollectionId == nil {
		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
	}
	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
	}
	if s.Image == nil {
		invalidParams.Add(request.NewErrParamRequired("Image"))
	}
	if s.MaxFaces != nil && *s.MaxFaces < 1 {
		invalidParams.Add(request.NewErrParamMinValue("MaxFaces", 1))
	}
	// Nested Image errors are appended after the scalar checks (generated
	// ordering); they are reported under the "Image" context.
	if s.Image != nil {
		if err := s.Image.Validate(); err != nil {
			invalidParams.AddNested("Image", err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetCollectionId sets the CollectionId field's value.
func (s *SearchFacesByImageInput) SetCollectionId(v string) *SearchFacesByImageInput {
	s.CollectionId = &v
	return s
}

// SetFaceMatchThreshold sets the FaceMatchThreshold field's value.
func (s *SearchFacesByImageInput) SetFaceMatchThreshold(v float64) *SearchFacesByImageInput {
	s.FaceMatchThreshold = &v
	return s
}

// SetImage sets the Image field's value.
func (s *SearchFacesByImageInput) SetImage(v *Image) *SearchFacesByImageInput {
	s.Image = v
	return s
}

// SetMaxFaces sets the MaxFaces field's value.
func (s *SearchFacesByImageInput) SetMaxFaces(v int64) *SearchFacesByImageInput {
	s.MaxFaces = &v
	return s
}
4125
// SearchFacesByImageOutput is the response to a SearchFacesByImage request:
// the matching faces plus details of the face in the input image that was
// used for the search.
type SearchFacesByImageOutput struct {
	_ struct{} `type:"structure"`

	// An array of faces that match the input face, along with the confidence in
	// the match.
	FaceMatches []*FaceMatch `type:"list"`

	// The bounding box around the face in the input image that Amazon Rekognition
	// used for the search.
	SearchedFaceBoundingBox *BoundingBox `type:"structure"`

	// The level of confidence that the searchedFaceBoundingBox contains a face.
	SearchedFaceConfidence *float64 `type:"float"`
}

// String returns the string representation
func (s SearchFacesByImageOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SearchFacesByImageOutput) GoString() string {
	return s.String()
}

// SetFaceMatches sets the FaceMatches field's value.
func (s *SearchFacesByImageOutput) SetFaceMatches(v []*FaceMatch) *SearchFacesByImageOutput {
	s.FaceMatches = v
	return s
}

// SetSearchedFaceBoundingBox sets the SearchedFaceBoundingBox field's value.
func (s *SearchFacesByImageOutput) SetSearchedFaceBoundingBox(v *BoundingBox) *SearchFacesByImageOutput {
	s.SearchedFaceBoundingBox = v
	return s
}

// SetSearchedFaceConfidence sets the SearchedFaceConfidence field's value.
func (s *SearchFacesByImageOutput) SetSearchedFaceConfidence(v float64) *SearchFacesByImageOutput {
	s.SearchedFaceConfidence = &v
	return s
}
4168
// SearchFacesInput carries the parameters for a SearchFaces request: the
// collection to search, the face ID to match against, and optional match
// threshold and result-count limits.
type SearchFacesInput struct {
	_ struct{} `type:"structure"`

	// ID of the collection the face belongs to.
	//
	// CollectionId is a required field
	CollectionId *string `min:"1" type:"string" required:"true"`

	// ID of a face to find matches for in the collection.
	//
	// FaceId is a required field
	FaceId *string `type:"string" required:"true"`

	// Optional value specifying the minimum confidence in the face match to return.
	// For example, don't return any matches where confidence in matches is less
	// than 70%.
	FaceMatchThreshold *float64 `type:"float"`

	// Maximum number of faces to return. The operation returns the maximum number
	// of faces with the highest confidence in the match.
	MaxFaces *int64 `min:"1" type:"integer"`
}

// String returns the string representation
func (s SearchFacesInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SearchFacesInput) GoString() string {
	return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
// Note that FaceId carries no min-length tag, so only its presence is
// checked (unlike CollectionId, which also has min:"1").
func (s *SearchFacesInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "SearchFacesInput"}
	if s.CollectionId == nil {
		invalidParams.Add(request.NewErrParamRequired("CollectionId"))
	}
	if s.CollectionId != nil && len(*s.CollectionId) < 1 {
		invalidParams.Add(request.NewErrParamMinLen("CollectionId", 1))
	}
	if s.FaceId == nil {
		invalidParams.Add(request.NewErrParamRequired("FaceId"))
	}
	if s.MaxFaces != nil && *s.MaxFaces < 1 {
		invalidParams.Add(request.NewErrParamMinValue("MaxFaces", 1))
	}

	if invalidParams.Len() > 0 {
		return invalidParams
	}
	return nil
}

// SetCollectionId sets the CollectionId field's value.
func (s *SearchFacesInput) SetCollectionId(v string) *SearchFacesInput {
	s.CollectionId = &v
	return s
}

// SetFaceId sets the FaceId field's value.
func (s *SearchFacesInput) SetFaceId(v string) *SearchFacesInput {
	s.FaceId = &v
	return s
}

// SetFaceMatchThreshold sets the FaceMatchThreshold field's value.
func (s *SearchFacesInput) SetFaceMatchThreshold(v float64) *SearchFacesInput {
	s.FaceMatchThreshold = &v
	return s
}

// SetMaxFaces sets the MaxFaces field's value.
func (s *SearchFacesInput) SetMaxFaces(v int64) *SearchFacesInput {
	s.MaxFaces = &v
	return s
}
4247
// SearchFacesOutput is the response to a SearchFaces request: the matching
// faces and the ID of the face that was searched for.
type SearchFacesOutput struct {
	_ struct{} `type:"structure"`

	// An array of faces that matched the input face, along with the confidence
	// in the match.
	FaceMatches []*FaceMatch `type:"list"`

	// ID of the face that was searched for matches in a collection.
	SearchedFaceId *string `type:"string"`
}

// String returns the string representation
func (s SearchFacesOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SearchFacesOutput) GoString() string {
	return s.String()
}

// SetFaceMatches sets the FaceMatches field's value.
func (s *SearchFacesOutput) SetFaceMatches(v []*FaceMatch) *SearchFacesOutput {
	s.FaceMatches = v
	return s
}

// SetSearchedFaceId sets the SearchedFaceId field's value.
func (s *SearchFacesOutput) SetSearchedFaceId(v string) *SearchFacesOutput {
	s.SearchedFaceId = &v
	return s
}
4280
// Indicates whether or not the face is smiling, and the confidence level in
// the determination.
//
// Fields are pointers so an absent value (nil) can be distinguished from a
// zero value or false.
type Smile struct {
	_ struct{} `type:"structure"`

	// Level of confidence in the determination.
	Confidence *float64 `type:"float"`

	// Boolean value that indicates whether the face is smiling or not.
	Value *bool `type:"boolean"`
}

// String returns the string representation
func (s Smile) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Smile) GoString() string {
	return s.String()
}

// SetConfidence sets the Confidence field's value.
func (s *Smile) SetConfidence(v float64) *Smile {
	s.Confidence = &v
	return s
}

// SetValue sets the Value field's value.
func (s *Smile) SetValue(v bool) *Smile {
	s.Value = &v
	return s
}
4314
4315// Indicates whether or not the face is wearing sunglasses, and the confidence
4316// level in the determination.
4317type Sunglasses struct {
4318	_ struct{} `type:"structure"`
4319
4320	// Level of confidence in the determination.
4321	Confidence *float64 `type:"float"`
4322
4323	// Boolean value that indicates whether the face is wearing sunglasses or not.
4324	Value *bool `type:"boolean"`
4325}
4326
4327// String returns the string representation
4328func (s Sunglasses) String() string {
4329	return awsutil.Prettify(s)
4330}
4331
4332// GoString returns the string representation
4333func (s Sunglasses) GoString() string {
4334	return s.String()
4335}
4336
4337// SetConfidence sets the Confidence field's value.
4338func (s *Sunglasses) SetConfidence(v float64) *Sunglasses {
4339	s.Confidence = &v
4340	return s
4341}
4342
4343// SetValue sets the Value field's value.
4344func (s *Sunglasses) SetValue(v bool) *Sunglasses {
4345	s.Value = &v
4346	return s
4347}
4348
// Enum values accepted for the Attribute type.
const (
	// AttributeDefault is an Attribute enum value
	AttributeDefault = "DEFAULT"

	// AttributeAll is an Attribute enum value
	AttributeAll = "ALL"
)
4356
// Enum values accepted for the EmotionName type.
const (
	// EmotionNameHappy is an EmotionName enum value
	EmotionNameHappy = "HAPPY"

	// EmotionNameSad is an EmotionName enum value
	EmotionNameSad = "SAD"

	// EmotionNameAngry is an EmotionName enum value
	EmotionNameAngry = "ANGRY"

	// EmotionNameConfused is an EmotionName enum value
	EmotionNameConfused = "CONFUSED"

	// EmotionNameDisgusted is an EmotionName enum value
	EmotionNameDisgusted = "DISGUSTED"

	// EmotionNameSurprised is an EmotionName enum value
	EmotionNameSurprised = "SURPRISED"

	// EmotionNameCalm is an EmotionName enum value
	EmotionNameCalm = "CALM"

	// EmotionNameUnknown is an EmotionName enum value
	EmotionNameUnknown = "UNKNOWN"
)
4382
// Enum values accepted for the GenderType type.
const (
	// GenderTypeMale is a GenderType enum value
	GenderTypeMale = "Male"

	// GenderTypeFemale is a GenderType enum value
	GenderTypeFemale = "Female"
)
4390
// Enum values accepted for the LandmarkType type, identifying facial
// landmark points returned by face detection.
const (
	// LandmarkTypeEyeLeft is a LandmarkType enum value
	LandmarkTypeEyeLeft = "eyeLeft"

	// LandmarkTypeEyeRight is a LandmarkType enum value
	LandmarkTypeEyeRight = "eyeRight"

	// LandmarkTypeNose is a LandmarkType enum value
	LandmarkTypeNose = "nose"

	// LandmarkTypeMouthLeft is a LandmarkType enum value
	LandmarkTypeMouthLeft = "mouthLeft"

	// LandmarkTypeMouthRight is a LandmarkType enum value
	LandmarkTypeMouthRight = "mouthRight"

	// LandmarkTypeLeftEyeBrowLeft is a LandmarkType enum value
	LandmarkTypeLeftEyeBrowLeft = "leftEyeBrowLeft"

	// LandmarkTypeLeftEyeBrowRight is a LandmarkType enum value
	LandmarkTypeLeftEyeBrowRight = "leftEyeBrowRight"

	// LandmarkTypeLeftEyeBrowUp is a LandmarkType enum value
	LandmarkTypeLeftEyeBrowUp = "leftEyeBrowUp"

	// LandmarkTypeRightEyeBrowLeft is a LandmarkType enum value
	LandmarkTypeRightEyeBrowLeft = "rightEyeBrowLeft"

	// LandmarkTypeRightEyeBrowRight is a LandmarkType enum value
	LandmarkTypeRightEyeBrowRight = "rightEyeBrowRight"

	// LandmarkTypeRightEyeBrowUp is a LandmarkType enum value
	LandmarkTypeRightEyeBrowUp = "rightEyeBrowUp"

	// LandmarkTypeLeftEyeLeft is a LandmarkType enum value
	LandmarkTypeLeftEyeLeft = "leftEyeLeft"

	// LandmarkTypeLeftEyeRight is a LandmarkType enum value
	LandmarkTypeLeftEyeRight = "leftEyeRight"

	// LandmarkTypeLeftEyeUp is a LandmarkType enum value
	LandmarkTypeLeftEyeUp = "leftEyeUp"

	// LandmarkTypeLeftEyeDown is a LandmarkType enum value
	LandmarkTypeLeftEyeDown = "leftEyeDown"

	// LandmarkTypeRightEyeLeft is a LandmarkType enum value
	LandmarkTypeRightEyeLeft = "rightEyeLeft"

	// LandmarkTypeRightEyeRight is a LandmarkType enum value
	LandmarkTypeRightEyeRight = "rightEyeRight"

	// LandmarkTypeRightEyeUp is a LandmarkType enum value
	LandmarkTypeRightEyeUp = "rightEyeUp"

	// LandmarkTypeRightEyeDown is a LandmarkType enum value
	LandmarkTypeRightEyeDown = "rightEyeDown"

	// LandmarkTypeNoseLeft is a LandmarkType enum value
	LandmarkTypeNoseLeft = "noseLeft"

	// LandmarkTypeNoseRight is a LandmarkType enum value
	LandmarkTypeNoseRight = "noseRight"

	// LandmarkTypeMouthUp is a LandmarkType enum value
	LandmarkTypeMouthUp = "mouthUp"

	// LandmarkTypeMouthDown is a LandmarkType enum value
	LandmarkTypeMouthDown = "mouthDown"

	// LandmarkTypeLeftPupil is a LandmarkType enum value
	LandmarkTypeLeftPupil = "leftPupil"

	// LandmarkTypeRightPupil is a LandmarkType enum value
	LandmarkTypeRightPupil = "rightPupil"
)
4467
// Enum values accepted for the OrientationCorrection type.
const (
	// OrientationCorrectionRotate0 is an OrientationCorrection enum value
	OrientationCorrectionRotate0 = "ROTATE_0"

	// OrientationCorrectionRotate90 is an OrientationCorrection enum value
	OrientationCorrectionRotate90 = "ROTATE_90"

	// OrientationCorrectionRotate180 is an OrientationCorrection enum value
	OrientationCorrectionRotate180 = "ROTATE_180"

	// OrientationCorrectionRotate270 is an OrientationCorrection enum value
	OrientationCorrectionRotate270 = "ROTATE_270"
)
4481