// Code generated by smithy-go-codegen DO NOT EDIT.

package rekognition

import (
	"context"
	"fmt"
	awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
	"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
	"github.com/aws/aws-sdk-go-v2/service/rekognition/types"
	"github.com/aws/smithy-go/middleware"
	smithyhttp "github.com/aws/smithy-go/transport/http"
)

// Gets the face search results for Amazon Rekognition Video face search started
// by StartFaceSearch. The search returns faces in a collection that match the
// faces of persons detected in a video. It also includes the time(s) that faces
// are matched in the video. Face search in a video is an asynchronous
// operation. You start face search by calling StartFaceSearch, which returns a
// job identifier (JobId). When the search operation finishes, Amazon
// Rekognition Video publishes a completion status to the Amazon Simple
// Notification Service topic registered in the initial call to StartFaceSearch.
// To get the search results, first check that the status value published to the
// Amazon SNS topic is SUCCEEDED. If so, call GetFaceSearch and pass the job
// identifier (JobId) from the initial call to StartFaceSearch. For more
// information, see Searching Faces in a Collection in the Amazon Rekognition
// Developer Guide. The search results are returned in an array, Persons, of
// PersonMatch objects. Each PersonMatch element contains details about the
// matching faces in the input collection, person information (facial
// attributes, bounding boxes, and person identifier) for the matched person,
// and the time the person was matched in the video. GetFaceSearch only returns
// the default facial attributes (BoundingBox, Confidence, Landmarks, Pose, and
// Quality). The other facial attributes listed in the Face object of the
// following response syntax are not returned. For more information, see
// FaceDetail in the Amazon Rekognition Developer Guide. By default, the Persons
// array is sorted by the time, in milliseconds from the start of the video,
// that persons are matched. You can also sort persons by specifying INDEX for
// the SortBy input parameter.
func (c *Client) GetFaceSearch(ctx context.Context, params *GetFaceSearchInput, optFns ...func(*Options)) (*GetFaceSearchOutput, error) {
	if params == nil {
		params = &GetFaceSearchInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "GetFaceSearch", params, optFns, addOperationGetFaceSearchMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*GetFaceSearchOutput)
	out.ResultMetadata = metadata
	return out, nil
}
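
// waitForFaceSearch is a minimal illustrative sketch, not part of the generated
// API: it polls GetFaceSearch until the asynchronous job leaves the IN_PROGRESS
// state. Production code would normally wait for the Amazon SNS completion
// notification described above instead of polling. The jobID value is assumed
// to come from a prior StartFaceSearch call, and the delay callback (for
// example, a time.Sleep with backoff) is injected to keep the sketch
// dependency-free.
func waitForFaceSearch(ctx context.Context, c *Client, jobID string, delay func()) (*GetFaceSearchOutput, error) {
	for {
		out, err := c.GetFaceSearch(ctx, &GetFaceSearchInput{JobId: &jobID})
		if err != nil {
			return nil, err
		}
		switch out.JobStatus {
		case types.VideoJobStatusSucceeded:
			// Job finished; the first page of results is already in out.
			return out, nil
		case types.VideoJobStatusFailed:
			msg := ""
			if out.StatusMessage != nil {
				msg = *out.StatusMessage
			}
			return nil, fmt.Errorf("face search failed: %s", msg)
		}
		// Still IN_PROGRESS; stop early if the context was canceled.
		if err := ctx.Err(); err != nil {
			return nil, err
		}
		delay()
	}
}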

type GetFaceSearchInput struct {

	// The job identifier for the search request. You get the job identifier from
	// an initial call to StartFaceSearch.
	//
	// This member is required.
	JobId *string

	// Maximum number of results to return per paginated call. The largest value
	// you can specify is 1000. If you specify a value greater than 1000, a maximum
	// of 1000 results is returned. The default value is 1000.
	MaxResults *int32

	// If the previous response was incomplete (because there are more search
	// results to retrieve), Amazon Rekognition Video returns a pagination token in
	// the response. You can use this pagination token to retrieve the next set of
	// search results.
	NextToken *string

	// Sort to use for grouping faces in the response. Use TIMESTAMP to group faces
	// by the time that they are recognized. Use INDEX to sort by recognized faces.
	SortBy types.FaceSearchSortBy
}

type GetFaceSearchOutput struct {

	// The current status of the face search job.
	JobStatus types.VideoJobStatus

	// If the response is truncated, Amazon Rekognition Video returns this token
	// that you can use in the subsequent request to retrieve the next set of
	// search results.
	NextToken *string

	// An array of persons, PersonMatch, in the video whose face(s) match the
	// face(s) in an Amazon Rekognition collection. It also includes time
	// information for when persons are matched in the video. You specify the input
	// collection in an initial call to StartFaceSearch. Each Persons element
	// includes the time the person was matched, face match details (FaceMatches)
	// for matching faces in the collection, and person information (Person) for
	// the matched person.
	Persons []types.PersonMatch

	// If the job fails, StatusMessage provides a descriptive error message.
	StatusMessage *string

	// Information about a video that Amazon Rekognition analyzed. VideoMetadata is
	// returned in every page of paginated responses from an Amazon Rekognition
	// Video operation.
	VideoMetadata *types.VideoMetadata

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
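
// printPersonMatches is a minimal illustrative sketch, not part of the
// generated API: it walks the Persons array described above. Each PersonMatch
// carries the timestamp (milliseconds from the start of the video) at which the
// person was matched, plus the matching collection faces and their similarity
// scores.
func printPersonMatches(out *GetFaceSearchOutput) {
	for _, pm := range out.Persons {
		for _, fm := range pm.FaceMatches {
			// Pointer fields may be nil, so guard each dereference.
			faceID := ""
			if fm.Face != nil && fm.Face.FaceId != nil {
				faceID = *fm.Face.FaceId
			}
			similarity := float32(0)
			if fm.Similarity != nil {
				similarity = *fm.Similarity
			}
			fmt.Printf("t=%dms face=%s similarity=%.1f%%\n", pm.Timestamp, faceID, similarity)
		}
	}
}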

func addOperationGetFaceSearchMiddlewares(stack *middleware.Stack, options Options) (err error) {
	err = stack.Serialize.Add(&awsAwsjson11_serializeOpGetFaceSearch{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpGetFaceSearch{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = addOpGetFaceSearchValidationMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opGetFaceSearch(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}
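
// withRequestLogging is a minimal illustrative sketch, not part of the
// generated API: it shows how a caller can splice an extra step into the same
// middleware stack assembled above, using the per-operation optFns hook, e.g.
// client.GetFaceSearch(ctx, params, withRequestLogging()). It assumes the
// standard APIOptions slice that aws-sdk-go-v2 service clients expose on
// Options.
func withRequestLogging() func(*Options) {
	return func(o *Options) {
		o.APIOptions = append(o.APIOptions, func(stack *middleware.Stack) error {
			// Insert at the front of the Initialize step so it runs before the
			// serializer and the rest of the stack.
			return stack.Initialize.Add(middleware.InitializeMiddlewareFunc("sketchRequestLog",
				func(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (middleware.InitializeOutput, middleware.Metadata, error) {
					fmt.Println("GetFaceSearch: request starting")
					return next.HandleInitialize(ctx, in)
				}), middleware.Before)
		})
	}
}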

// GetFaceSearchAPIClient is a client that implements the GetFaceSearch operation.
type GetFaceSearchAPIClient interface {
	GetFaceSearch(context.Context, *GetFaceSearchInput, ...func(*Options)) (*GetFaceSearchOutput, error)
}

var _ GetFaceSearchAPIClient = (*Client)(nil)

// GetFaceSearchPaginatorOptions is the paginator options for GetFaceSearch
type GetFaceSearchPaginatorOptions struct {
	// Maximum number of results to return per paginated call. The largest value
	// you can specify is 1000. If you specify a value greater than 1000, a maximum
	// of 1000 results is returned. The default value is 1000.
	Limit int32

	// Set to true if pagination should stop if the service returns a pagination
	// token that matches the most recent token provided to the service.
	StopOnDuplicateToken bool
}

// GetFaceSearchPaginator is a paginator for GetFaceSearch
type GetFaceSearchPaginator struct {
	options   GetFaceSearchPaginatorOptions
	client    GetFaceSearchAPIClient
	params    *GetFaceSearchInput
	nextToken *string
	firstPage bool
}

// NewGetFaceSearchPaginator returns a new GetFaceSearchPaginator
func NewGetFaceSearchPaginator(client GetFaceSearchAPIClient, params *GetFaceSearchInput, optFns ...func(*GetFaceSearchPaginatorOptions)) *GetFaceSearchPaginator {
	if params == nil {
		params = &GetFaceSearchInput{}
	}

	options := GetFaceSearchPaginatorOptions{}
	if params.MaxResults != nil {
		options.Limit = *params.MaxResults
	}

	for _, fn := range optFns {
		fn(&options)
	}

	return &GetFaceSearchPaginator{
		options:   options,
		client:    client,
		params:    params,
		firstPage: true,
	}
}

// HasMorePages returns a boolean indicating whether more pages are available
func (p *GetFaceSearchPaginator) HasMorePages() bool {
	return p.firstPage || p.nextToken != nil
}

// NextPage retrieves the next GetFaceSearch page.
func (p *GetFaceSearchPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*GetFaceSearchOutput, error) {
	if !p.HasMorePages() {
		return nil, fmt.Errorf("no more pages available")
	}

	params := *p.params
	params.NextToken = p.nextToken

	var limit *int32
	if p.options.Limit > 0 {
		limit = &p.options.Limit
	}
	params.MaxResults = limit

	result, err := p.client.GetFaceSearch(ctx, &params, optFns...)
	if err != nil {
		return nil, err
	}
	p.firstPage = false

	prevToken := p.nextToken
	p.nextToken = result.NextToken

	if p.options.StopOnDuplicateToken && prevToken != nil && p.nextToken != nil && *prevToken == *p.nextToken {
		p.nextToken = nil
	}

	return result, nil
}
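
// collectPersonMatches is a minimal illustrative sketch, not part of the
// generated API: it shows the intended paginator loop, in which HasMorePages
// gates the iteration and NextPage fetches pages until the service stops
// returning a NextToken. The jobID value is assumed to come from a prior
// StartFaceSearch call.
func collectPersonMatches(ctx context.Context, client GetFaceSearchAPIClient, jobID string) ([]types.PersonMatch, error) {
	paginator := NewGetFaceSearchPaginator(client, &GetFaceSearchInput{JobId: &jobID},
		func(o *GetFaceSearchPaginatorOptions) {
			o.Limit = 100 // page size; the service caps this at 1000
		})
	var matches []types.PersonMatch
	for paginator.HasMorePages() {
		page, err := paginator.NextPage(ctx)
		if err != nil {
			return nil, err
		}
		matches = append(matches, page.Persons...)
	}
	return matches, nil
}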

func newServiceMetadataMiddleware_opGetFaceSearch(region string) *awsmiddleware.RegisterServiceMetadata {
	return &awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "rekognition",
		OperationName: "GetFaceSearch",
	}
}