// Package computervision implements the Azure ARM Computervision service API version 3.1.
//
// The Computer Vision API provides state-of-the-art algorithms to process images and return information. For example,
// it can be used to determine if an image contains mature content, or it can be used to find all the faces in an
// image. It also has other features like estimating dominant and accent colors, categorizing the content of images,
// and describing an image with complete English sentences. Additionally, it can also intelligently generate images
// thumbnails for displaying large images effectively.
package computervision

// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"context"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/azure"
	"github.com/Azure/go-autorest/autorest/validation"
	"github.com/Azure/go-autorest/tracing"
	"github.com/satori/go.uuid"
	"io"
	"net/http"
)

// BaseClient is the base client for Computervision.
type BaseClient struct {
	// Embedded autorest.Client supplies the HTTP pipeline (Send, RetryAttempts,
	// RetryDuration) used by every operation's Sender.
	autorest.Client
	// Endpoint fills the "{Endpoint}" placeholder of the request base URL
	// "{Endpoint}/vision/v3.1" in each operation's Preparer.
	Endpoint string
}

// New creates an instance of the BaseClient client.
45func New(endpoint string) BaseClient { 46 return NewWithoutDefaults(endpoint) 47} 48 49// NewWithoutDefaults creates an instance of the BaseClient client. 50func NewWithoutDefaults(endpoint string) BaseClient { 51 return BaseClient{ 52 Client: autorest.NewClientWithUserAgent(UserAgent()), 53 Endpoint: endpoint, 54 } 55} 56 57// AnalyzeImage this operation extracts a rich set of visual features based on the image content. 58// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. Within your request, there 59// is an optional parameter to allow you to choose which features to return. By default, image categories are returned 60// in the response. 61// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 62// message to help understand what went wrong. 63// Parameters: 64// imageURL - a JSON document with a URL pointing to the image that is to be analyzed. 65// visualFeatures - a string indicating what visual feature types to return. Multiple values should be 66// comma-separated. Valid visual feature types include: Categories - categorizes image content according to a 67// taxonomy defined in documentation. Tags - tags the image with a detailed list of words related to the image 68// content. Description - describes the image content with a complete English sentence. Faces - detects if 69// faces are present. If present, generate coordinates, gender and age. ImageType - detects if image is clipart 70// or a line drawing. Color - determines the accent color, dominant color, and whether an image is black&white. 71// Adult - detects if the image is pornographic in nature (depicts nudity or a sex act), or is gory (depicts 72// extreme violence or blood). Sexually suggestive content (aka racy content) is also detected. Objects - 73// detects various objects within an image, including the approximate location. The Objects argument is only 74// available in English. 
Brands - detects various brands within an image, including the approximate location. 75// The Brands argument is only available in English. 76// details - a string indicating which domain-specific details to return. Multiple values should be 77// comma-separated. Valid visual feature types include: Celebrities - identifies celebrities if detected in the 78// image, Landmarks - identifies notable landmarks in the image. 79// language - the desired language for output generation. If this parameter is not specified, the default value 80// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 81// zh - Simplified Chinese. 82// descriptionExclude - turn off specified domain models when generating the description. 83func (client BaseClient) AnalyzeImage(ctx context.Context, imageURL ImageURL, visualFeatures []VisualFeatureTypes, details []Details, language string, descriptionExclude []DescriptionExclude) (result ImageAnalysis, err error) { 84 if tracing.IsEnabled() { 85 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.AnalyzeImage") 86 defer func() { 87 sc := -1 88 if result.Response.Response != nil { 89 sc = result.Response.Response.StatusCode 90 } 91 tracing.EndSpan(ctx, sc, err) 92 }() 93 } 94 if err := validation.Validate([]validation.Validation{ 95 {TargetValue: imageURL, 96 Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { 97 return result, validation.NewError("computervision.BaseClient", "AnalyzeImage", err.Error()) 98 } 99 100 req, err := client.AnalyzeImagePreparer(ctx, imageURL, visualFeatures, details, language, descriptionExclude) 101 if err != nil { 102 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImage", nil, "Failure preparing request") 103 return 104 } 105 106 resp, err := client.AnalyzeImageSender(req) 107 if err != nil { 108 result.Response = autorest.Response{Response: resp} 109 err = 
autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImage", resp, "Failure sending request") 110 return 111 } 112 113 result, err = client.AnalyzeImageResponder(resp) 114 if err != nil { 115 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImage", resp, "Failure responding to request") 116 return 117 } 118 119 return 120} 121 122// AnalyzeImagePreparer prepares the AnalyzeImage request. 123func (client BaseClient) AnalyzeImagePreparer(ctx context.Context, imageURL ImageURL, visualFeatures []VisualFeatureTypes, details []Details, language string, descriptionExclude []DescriptionExclude) (*http.Request, error) { 124 urlParameters := map[string]interface{}{ 125 "Endpoint": client.Endpoint, 126 } 127 128 queryParameters := map[string]interface{}{} 129 if visualFeatures != nil && len(visualFeatures) > 0 { 130 queryParameters["visualFeatures"] = autorest.Encode("query", visualFeatures, ",") 131 } 132 if details != nil && len(details) > 0 { 133 queryParameters["details"] = autorest.Encode("query", details, ",") 134 } 135 if len(string(language)) > 0 { 136 queryParameters["language"] = autorest.Encode("query", language) 137 } else { 138 queryParameters["language"] = autorest.Encode("query", "en") 139 } 140 if descriptionExclude != nil && len(descriptionExclude) > 0 { 141 queryParameters["descriptionExclude"] = autorest.Encode("query", descriptionExclude, ",") 142 } 143 144 preparer := autorest.CreatePreparer( 145 autorest.AsContentType("application/json; charset=utf-8"), 146 autorest.AsPost(), 147 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 148 autorest.WithPath("/analyze"), 149 autorest.WithJSON(imageURL), 150 autorest.WithQueryParameters(queryParameters)) 151 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 152} 153 154// AnalyzeImageSender sends the AnalyzeImage request. The method will close the 155// http.Response Body if it receives an error. 
156func (client BaseClient) AnalyzeImageSender(req *http.Request) (*http.Response, error) { 157 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 158} 159 160// AnalyzeImageResponder handles the response to the AnalyzeImage request. The method always 161// closes the http.Response Body. 162func (client BaseClient) AnalyzeImageResponder(resp *http.Response) (result ImageAnalysis, err error) { 163 err = autorest.Respond( 164 resp, 165 azure.WithErrorUnlessStatusCode(http.StatusOK), 166 autorest.ByUnmarshallingJSON(&result), 167 autorest.ByClosing()) 168 result.Response = autorest.Response{Response: resp} 169 return 170} 171 172// AnalyzeImageByDomain this operation recognizes content within an image by applying a domain-specific model. The list 173// of domain-specific models that are supported by the Computer Vision API can be retrieved using the /models GET 174// request. Currently, the API provides following domain-specific models: celebrities, landmarks. 175// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. 176// A successful response will be returned in JSON. 177// If the request failed, the response will contain an error code and a message to help understand what went wrong. 178// Parameters: 179// model - the domain-specific content to recognize. 180// imageURL - a JSON document with a URL pointing to the image that is to be analyzed. 181// language - the desired language for output generation. If this parameter is not specified, the default value 182// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 183// zh - Simplified Chinese. 
184func (client BaseClient) AnalyzeImageByDomain(ctx context.Context, model string, imageURL ImageURL, language string) (result DomainModelResults, err error) { 185 if tracing.IsEnabled() { 186 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.AnalyzeImageByDomain") 187 defer func() { 188 sc := -1 189 if result.Response.Response != nil { 190 sc = result.Response.Response.StatusCode 191 } 192 tracing.EndSpan(ctx, sc, err) 193 }() 194 } 195 if err := validation.Validate([]validation.Validation{ 196 {TargetValue: imageURL, 197 Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { 198 return result, validation.NewError("computervision.BaseClient", "AnalyzeImageByDomain", err.Error()) 199 } 200 201 req, err := client.AnalyzeImageByDomainPreparer(ctx, model, imageURL, language) 202 if err != nil { 203 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomain", nil, "Failure preparing request") 204 return 205 } 206 207 resp, err := client.AnalyzeImageByDomainSender(req) 208 if err != nil { 209 result.Response = autorest.Response{Response: resp} 210 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomain", resp, "Failure sending request") 211 return 212 } 213 214 result, err = client.AnalyzeImageByDomainResponder(resp) 215 if err != nil { 216 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomain", resp, "Failure responding to request") 217 return 218 } 219 220 return 221} 222 223// AnalyzeImageByDomainPreparer prepares the AnalyzeImageByDomain request. 
224func (client BaseClient) AnalyzeImageByDomainPreparer(ctx context.Context, model string, imageURL ImageURL, language string) (*http.Request, error) { 225 urlParameters := map[string]interface{}{ 226 "Endpoint": client.Endpoint, 227 } 228 229 pathParameters := map[string]interface{}{ 230 "model": autorest.Encode("path", model), 231 } 232 233 queryParameters := map[string]interface{}{} 234 if len(string(language)) > 0 { 235 queryParameters["language"] = autorest.Encode("query", language) 236 } else { 237 queryParameters["language"] = autorest.Encode("query", "en") 238 } 239 240 preparer := autorest.CreatePreparer( 241 autorest.AsContentType("application/json; charset=utf-8"), 242 autorest.AsPost(), 243 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 244 autorest.WithPathParameters("/models/{model}/analyze", pathParameters), 245 autorest.WithJSON(imageURL), 246 autorest.WithQueryParameters(queryParameters)) 247 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 248} 249 250// AnalyzeImageByDomainSender sends the AnalyzeImageByDomain request. The method will close the 251// http.Response Body if it receives an error. 252func (client BaseClient) AnalyzeImageByDomainSender(req *http.Request) (*http.Response, error) { 253 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 254} 255 256// AnalyzeImageByDomainResponder handles the response to the AnalyzeImageByDomain request. The method always 257// closes the http.Response Body. 
258func (client BaseClient) AnalyzeImageByDomainResponder(resp *http.Response) (result DomainModelResults, err error) { 259 err = autorest.Respond( 260 resp, 261 azure.WithErrorUnlessStatusCode(http.StatusOK), 262 autorest.ByUnmarshallingJSON(&result), 263 autorest.ByClosing()) 264 result.Response = autorest.Response{Response: resp} 265 return 266} 267 268// AnalyzeImageByDomainInStream this operation recognizes content within an image by applying a domain-specific model. 269// The list of domain-specific models that are supported by the Computer Vision API can be retrieved using the /models 270// GET request. Currently, the API provides following domain-specific models: celebrities, landmarks. 271// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. 272// A successful response will be returned in JSON. 273// If the request failed, the response will contain an error code and a message to help understand what went wrong. 274// Parameters: 275// model - the domain-specific content to recognize. 276// imageParameter - an image stream. 277// language - the desired language for output generation. If this parameter is not specified, the default value 278// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 279// zh - Simplified Chinese. 
280func (client BaseClient) AnalyzeImageByDomainInStream(ctx context.Context, model string, imageParameter io.ReadCloser, language string) (result DomainModelResults, err error) { 281 if tracing.IsEnabled() { 282 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.AnalyzeImageByDomainInStream") 283 defer func() { 284 sc := -1 285 if result.Response.Response != nil { 286 sc = result.Response.Response.StatusCode 287 } 288 tracing.EndSpan(ctx, sc, err) 289 }() 290 } 291 req, err := client.AnalyzeImageByDomainInStreamPreparer(ctx, model, imageParameter, language) 292 if err != nil { 293 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomainInStream", nil, "Failure preparing request") 294 return 295 } 296 297 resp, err := client.AnalyzeImageByDomainInStreamSender(req) 298 if err != nil { 299 result.Response = autorest.Response{Response: resp} 300 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomainInStream", resp, "Failure sending request") 301 return 302 } 303 304 result, err = client.AnalyzeImageByDomainInStreamResponder(resp) 305 if err != nil { 306 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageByDomainInStream", resp, "Failure responding to request") 307 return 308 } 309 310 return 311} 312 313// AnalyzeImageByDomainInStreamPreparer prepares the AnalyzeImageByDomainInStream request. 
314func (client BaseClient) AnalyzeImageByDomainInStreamPreparer(ctx context.Context, model string, imageParameter io.ReadCloser, language string) (*http.Request, error) { 315 urlParameters := map[string]interface{}{ 316 "Endpoint": client.Endpoint, 317 } 318 319 pathParameters := map[string]interface{}{ 320 "model": autorest.Encode("path", model), 321 } 322 323 queryParameters := map[string]interface{}{} 324 if len(string(language)) > 0 { 325 queryParameters["language"] = autorest.Encode("query", language) 326 } else { 327 queryParameters["language"] = autorest.Encode("query", "en") 328 } 329 330 preparer := autorest.CreatePreparer( 331 autorest.AsContentType("application/octet-stream"), 332 autorest.AsPost(), 333 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 334 autorest.WithPathParameters("/models/{model}/analyze", pathParameters), 335 autorest.WithFile(imageParameter), 336 autorest.WithQueryParameters(queryParameters)) 337 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 338} 339 340// AnalyzeImageByDomainInStreamSender sends the AnalyzeImageByDomainInStream request. The method will close the 341// http.Response Body if it receives an error. 342func (client BaseClient) AnalyzeImageByDomainInStreamSender(req *http.Request) (*http.Response, error) { 343 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 344} 345 346// AnalyzeImageByDomainInStreamResponder handles the response to the AnalyzeImageByDomainInStream request. The method always 347// closes the http.Response Body. 
348func (client BaseClient) AnalyzeImageByDomainInStreamResponder(resp *http.Response) (result DomainModelResults, err error) { 349 err = autorest.Respond( 350 resp, 351 azure.WithErrorUnlessStatusCode(http.StatusOK), 352 autorest.ByUnmarshallingJSON(&result), 353 autorest.ByClosing()) 354 result.Response = autorest.Response{Response: resp} 355 return 356} 357 358// AnalyzeImageInStream this operation extracts a rich set of visual features based on the image content. 359// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. Within your request, there 360// is an optional parameter to allow you to choose which features to return. By default, image categories are returned 361// in the response. 362// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 363// message to help understand what went wrong. 364// Parameters: 365// imageParameter - an image stream. 366// visualFeatures - a string indicating what visual feature types to return. Multiple values should be 367// comma-separated. Valid visual feature types include: Categories - categorizes image content according to a 368// taxonomy defined in documentation. Tags - tags the image with a detailed list of words related to the image 369// content. Description - describes the image content with a complete English sentence. Faces - detects if 370// faces are present. If present, generate coordinates, gender and age. ImageType - detects if image is clipart 371// or a line drawing. Color - determines the accent color, dominant color, and whether an image is black&white. 372// Adult - detects if the image is pornographic in nature (depicts nudity or a sex act), or is gory (depicts 373// extreme violence or blood). Sexually suggestive content (aka racy content) is also detected. Objects - 374// detects various objects within an image, including the approximate location. 
The Objects argument is only 375// available in English. Brands - detects various brands within an image, including the approximate location. 376// The Brands argument is only available in English. 377// details - a string indicating which domain-specific details to return. Multiple values should be 378// comma-separated. Valid visual feature types include: Celebrities - identifies celebrities if detected in the 379// image, Landmarks - identifies notable landmarks in the image. 380// language - the desired language for output generation. If this parameter is not specified, the default value 381// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 382// zh - Simplified Chinese. 383// descriptionExclude - turn off specified domain models when generating the description. 384func (client BaseClient) AnalyzeImageInStream(ctx context.Context, imageParameter io.ReadCloser, visualFeatures []VisualFeatureTypes, details []Details, language string, descriptionExclude []DescriptionExclude) (result ImageAnalysis, err error) { 385 if tracing.IsEnabled() { 386 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.AnalyzeImageInStream") 387 defer func() { 388 sc := -1 389 if result.Response.Response != nil { 390 sc = result.Response.Response.StatusCode 391 } 392 tracing.EndSpan(ctx, sc, err) 393 }() 394 } 395 req, err := client.AnalyzeImageInStreamPreparer(ctx, imageParameter, visualFeatures, details, language, descriptionExclude) 396 if err != nil { 397 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageInStream", nil, "Failure preparing request") 398 return 399 } 400 401 resp, err := client.AnalyzeImageInStreamSender(req) 402 if err != nil { 403 result.Response = autorest.Response{Response: resp} 404 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageInStream", resp, "Failure sending request") 405 return 406 } 407 408 result, err = client.AnalyzeImageInStreamResponder(resp) 409 if 
err != nil { 410 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "AnalyzeImageInStream", resp, "Failure responding to request") 411 return 412 } 413 414 return 415} 416 417// AnalyzeImageInStreamPreparer prepares the AnalyzeImageInStream request. 418func (client BaseClient) AnalyzeImageInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser, visualFeatures []VisualFeatureTypes, details []Details, language string, descriptionExclude []DescriptionExclude) (*http.Request, error) { 419 urlParameters := map[string]interface{}{ 420 "Endpoint": client.Endpoint, 421 } 422 423 queryParameters := map[string]interface{}{} 424 if visualFeatures != nil && len(visualFeatures) > 0 { 425 queryParameters["visualFeatures"] = autorest.Encode("query", visualFeatures, ",") 426 } 427 if details != nil && len(details) > 0 { 428 queryParameters["details"] = autorest.Encode("query", details, ",") 429 } 430 if len(string(language)) > 0 { 431 queryParameters["language"] = autorest.Encode("query", language) 432 } else { 433 queryParameters["language"] = autorest.Encode("query", "en") 434 } 435 if descriptionExclude != nil && len(descriptionExclude) > 0 { 436 queryParameters["descriptionExclude"] = autorest.Encode("query", descriptionExclude, ",") 437 } 438 439 preparer := autorest.CreatePreparer( 440 autorest.AsContentType("application/octet-stream"), 441 autorest.AsPost(), 442 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 443 autorest.WithPath("/analyze"), 444 autorest.WithFile(imageParameter), 445 autorest.WithQueryParameters(queryParameters)) 446 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 447} 448 449// AnalyzeImageInStreamSender sends the AnalyzeImageInStream request. The method will close the 450// http.Response Body if it receives an error. 
451func (client BaseClient) AnalyzeImageInStreamSender(req *http.Request) (*http.Response, error) { 452 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 453} 454 455// AnalyzeImageInStreamResponder handles the response to the AnalyzeImageInStream request. The method always 456// closes the http.Response Body. 457func (client BaseClient) AnalyzeImageInStreamResponder(resp *http.Response) (result ImageAnalysis, err error) { 458 err = autorest.Respond( 459 resp, 460 azure.WithErrorUnlessStatusCode(http.StatusOK), 461 autorest.ByUnmarshallingJSON(&result), 462 autorest.ByClosing()) 463 result.Response = autorest.Response{Response: resp} 464 return 465} 466 467// DescribeImage this operation generates a description of an image in human readable language with complete sentences. 468// The description is based on a collection of content tags, which are also returned by the operation. More than one 469// description can be generated for each image. Descriptions are ordered by their confidence score. Descriptions may 470// include results from celebrity and landmark domain models, if applicable. 471// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. 472// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 473// message to help understand what went wrong. 474// Parameters: 475// imageURL - a JSON document with a URL pointing to the image that is to be analyzed. 476// maxCandidates - maximum number of candidate descriptions to be returned. The default is 1. 477// language - the desired language for output generation. If this parameter is not specified, the default value 478// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 479// zh - Simplified Chinese. 
480// descriptionExclude - turn off specified domain models when generating the description. 481func (client BaseClient) DescribeImage(ctx context.Context, imageURL ImageURL, maxCandidates *int32, language string, descriptionExclude []DescriptionExclude) (result ImageDescription, err error) { 482 if tracing.IsEnabled() { 483 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.DescribeImage") 484 defer func() { 485 sc := -1 486 if result.Response.Response != nil { 487 sc = result.Response.Response.StatusCode 488 } 489 tracing.EndSpan(ctx, sc, err) 490 }() 491 } 492 if err := validation.Validate([]validation.Validation{ 493 {TargetValue: imageURL, 494 Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { 495 return result, validation.NewError("computervision.BaseClient", "DescribeImage", err.Error()) 496 } 497 498 req, err := client.DescribeImagePreparer(ctx, imageURL, maxCandidates, language, descriptionExclude) 499 if err != nil { 500 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImage", nil, "Failure preparing request") 501 return 502 } 503 504 resp, err := client.DescribeImageSender(req) 505 if err != nil { 506 result.Response = autorest.Response{Response: resp} 507 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImage", resp, "Failure sending request") 508 return 509 } 510 511 result, err = client.DescribeImageResponder(resp) 512 if err != nil { 513 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImage", resp, "Failure responding to request") 514 return 515 } 516 517 return 518} 519 520// DescribeImagePreparer prepares the DescribeImage request. 
521func (client BaseClient) DescribeImagePreparer(ctx context.Context, imageURL ImageURL, maxCandidates *int32, language string, descriptionExclude []DescriptionExclude) (*http.Request, error) { 522 urlParameters := map[string]interface{}{ 523 "Endpoint": client.Endpoint, 524 } 525 526 queryParameters := map[string]interface{}{} 527 if maxCandidates != nil { 528 queryParameters["maxCandidates"] = autorest.Encode("query", *maxCandidates) 529 } else { 530 queryParameters["maxCandidates"] = autorest.Encode("query", 1) 531 } 532 if len(string(language)) > 0 { 533 queryParameters["language"] = autorest.Encode("query", language) 534 } else { 535 queryParameters["language"] = autorest.Encode("query", "en") 536 } 537 if descriptionExclude != nil && len(descriptionExclude) > 0 { 538 queryParameters["descriptionExclude"] = autorest.Encode("query", descriptionExclude, ",") 539 } 540 541 preparer := autorest.CreatePreparer( 542 autorest.AsContentType("application/json; charset=utf-8"), 543 autorest.AsPost(), 544 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 545 autorest.WithPath("/describe"), 546 autorest.WithJSON(imageURL), 547 autorest.WithQueryParameters(queryParameters)) 548 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 549} 550 551// DescribeImageSender sends the DescribeImage request. The method will close the 552// http.Response Body if it receives an error. 553func (client BaseClient) DescribeImageSender(req *http.Request) (*http.Response, error) { 554 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 555} 556 557// DescribeImageResponder handles the response to the DescribeImage request. The method always 558// closes the http.Response Body. 
559func (client BaseClient) DescribeImageResponder(resp *http.Response) (result ImageDescription, err error) { 560 err = autorest.Respond( 561 resp, 562 azure.WithErrorUnlessStatusCode(http.StatusOK), 563 autorest.ByUnmarshallingJSON(&result), 564 autorest.ByClosing()) 565 result.Response = autorest.Response{Response: resp} 566 return 567} 568 569// DescribeImageInStream this operation generates a description of an image in human readable language with complete 570// sentences. The description is based on a collection of content tags, which are also returned by the operation. More 571// than one description can be generated for each image. Descriptions are ordered by their confidence score. 572// Descriptions may include results from celebrity and landmark domain models, if applicable. 573// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. 574// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 575// message to help understand what went wrong. 576// Parameters: 577// imageParameter - an image stream. 578// maxCandidates - maximum number of candidate descriptions to be returned. The default is 1. 579// language - the desired language for output generation. If this parameter is not specified, the default value 580// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 581// zh - Simplified Chinese. 582// descriptionExclude - turn off specified domain models when generating the description. 
583func (client BaseClient) DescribeImageInStream(ctx context.Context, imageParameter io.ReadCloser, maxCandidates *int32, language string, descriptionExclude []DescriptionExclude) (result ImageDescription, err error) { 584 if tracing.IsEnabled() { 585 ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.DescribeImageInStream") 586 defer func() { 587 sc := -1 588 if result.Response.Response != nil { 589 sc = result.Response.Response.StatusCode 590 } 591 tracing.EndSpan(ctx, sc, err) 592 }() 593 } 594 req, err := client.DescribeImageInStreamPreparer(ctx, imageParameter, maxCandidates, language, descriptionExclude) 595 if err != nil { 596 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImageInStream", nil, "Failure preparing request") 597 return 598 } 599 600 resp, err := client.DescribeImageInStreamSender(req) 601 if err != nil { 602 result.Response = autorest.Response{Response: resp} 603 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImageInStream", resp, "Failure sending request") 604 return 605 } 606 607 result, err = client.DescribeImageInStreamResponder(resp) 608 if err != nil { 609 err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DescribeImageInStream", resp, "Failure responding to request") 610 return 611 } 612 613 return 614} 615 616// DescribeImageInStreamPreparer prepares the DescribeImageInStream request. 
617func (client BaseClient) DescribeImageInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser, maxCandidates *int32, language string, descriptionExclude []DescriptionExclude) (*http.Request, error) { 618 urlParameters := map[string]interface{}{ 619 "Endpoint": client.Endpoint, 620 } 621 622 queryParameters := map[string]interface{}{} 623 if maxCandidates != nil { 624 queryParameters["maxCandidates"] = autorest.Encode("query", *maxCandidates) 625 } else { 626 queryParameters["maxCandidates"] = autorest.Encode("query", 1) 627 } 628 if len(string(language)) > 0 { 629 queryParameters["language"] = autorest.Encode("query", language) 630 } else { 631 queryParameters["language"] = autorest.Encode("query", "en") 632 } 633 if descriptionExclude != nil && len(descriptionExclude) > 0 { 634 queryParameters["descriptionExclude"] = autorest.Encode("query", descriptionExclude, ",") 635 } 636 637 preparer := autorest.CreatePreparer( 638 autorest.AsContentType("application/octet-stream"), 639 autorest.AsPost(), 640 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 641 autorest.WithPath("/describe"), 642 autorest.WithFile(imageParameter), 643 autorest.WithQueryParameters(queryParameters)) 644 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 645} 646 647// DescribeImageInStreamSender sends the DescribeImageInStream request. The method will close the 648// http.Response Body if it receives an error. 649func (client BaseClient) DescribeImageInStreamSender(req *http.Request) (*http.Response, error) { 650 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 651} 652 653// DescribeImageInStreamResponder handles the response to the DescribeImageInStream request. The method always 654// closes the http.Response Body. 
func (client BaseClient) DescribeImageInStreamResponder(resp *http.Response) (result ImageDescription, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Always attach the raw HTTP response, even when decoding failed.
	result.Response = autorest.Response{Response: resp}
	return
}

// DetectObjects performs object detection on the specified image.
// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL.
// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a
// message to help understand what went wrong.
// Parameters:
// imageURL - a JSON document with a URL pointing to the image that is to be analyzed.
func (client BaseClient) DetectObjects(ctx context.Context, imageURL ImageURL) (result DetectResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.DetectObjects")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The image URL is required; fail fast before issuing a network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "DetectObjects", err.Error())
	}

	req, err := client.DetectObjectsPreparer(ctx, imageURL)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjects", nil, "Failure preparing request")
		return
	}

	resp, err := client.DetectObjectsSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjects", resp, "Failure sending request")
		return
	}

	result, err = client.DetectObjectsResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjects", resp, "Failure responding to request")
		return
	}

	return
}

// DetectObjectsPreparer prepares the DetectObjects request.
func (client BaseClient) DetectObjectsPreparer(ctx context.Context, imageURL ImageURL) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	// POST the image URL as JSON to {Endpoint}/vision/v3.1/detect.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/detect"),
		autorest.WithJSON(imageURL))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DetectObjectsSender sends the DetectObjects request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) DetectObjectsSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// DetectObjectsResponder handles the response to the DetectObjects request. The method always
// closes the http.Response Body.
func (client BaseClient) DetectObjectsResponder(resp *http.Response) (result DetectResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Always attach the raw HTTP response, even when decoding failed.
	result.Response = autorest.Response{Response: resp}
	return
}

// DetectObjectsInStream performs object detection on the specified image.
// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL.
// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a
// message to help understand what went wrong.
// Parameters:
// imageParameter - an image stream.
func (client BaseClient) DetectObjectsInStream(ctx context.Context, imageParameter io.ReadCloser) (result DetectResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.DetectObjectsInStream")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.DetectObjectsInStreamPreparer(ctx, imageParameter)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjectsInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.DetectObjectsInStreamSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjectsInStream", resp, "Failure sending request")
		return
	}

	result, err = client.DetectObjectsInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "DetectObjectsInStream", resp, "Failure responding to request")
		return
	}

	return
}

// DetectObjectsInStreamPreparer prepares the DetectObjectsInStream request.
func (client BaseClient) DetectObjectsInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	// POST the raw image bytes to {Endpoint}/vision/v3.1/detect.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/octet-stream"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/detect"),
		autorest.WithFile(imageParameter))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DetectObjectsInStreamSender sends the DetectObjectsInStream request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) DetectObjectsInStreamSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// DetectObjectsInStreamResponder handles the response to the DetectObjectsInStream request. The method always
// closes the http.Response Body.
func (client BaseClient) DetectObjectsInStreamResponder(resp *http.Response) (result DetectResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Always attach the raw HTTP response, even when decoding failed.
	result.Response = autorest.Response{Response: resp}
	return
}

// GenerateThumbnail this operation generates a thumbnail image with the user-specified width and height. By default,
// the service analyzes the image, identifies the region of interest (ROI), and generates smart cropping coordinates
// based on the ROI. Smart cropping helps when you specify an aspect ratio that differs from that of the input image.
// A successful response contains the thumbnail image binary. If the request failed, the response contains an error
// code and a message to help determine what went wrong.
// Upon failure, the error code and an error message are returned. The error code could be one of InvalidImageUrl,
// InvalidImageFormat, InvalidImageSize, InvalidThumbnailSize, NotSupportedImage, FailedToProcess, Timeout, or
// InternalServerError.
// Parameters:
// width - width of the thumbnail, in pixels. It must be between 1 and 1024. Recommended minimum of 50.
// height - height of the thumbnail, in pixels. It must be between 1 and 1024. Recommended minimum of 50.
// imageURL - a JSON document with a URL pointing to the image that is to be analyzed.
// smartCropping - boolean flag for enabling smart cropping.
func (client BaseClient) GenerateThumbnail(ctx context.Context, width int32, height int32, imageURL ImageURL, smartCropping *bool) (result ReadCloser, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.GenerateThumbnail")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Enforce the documented 1..1024 pixel bounds and required URL client-side,
	// before issuing a network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: width,
			Constraints: []validation.Constraint{{Target: "width", Name: validation.InclusiveMaximum, Rule: int64(1024), Chain: nil},
				{Target: "width", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil}}},
		{TargetValue: height,
			Constraints: []validation.Constraint{{Target: "height", Name: validation.InclusiveMaximum, Rule: int64(1024), Chain: nil},
				{Target: "height", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil}}},
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "GenerateThumbnail", err.Error())
	}

	req, err := client.GenerateThumbnailPreparer(ctx, width, height, imageURL, smartCropping)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnail", nil, "Failure preparing request")
		return
	}

	resp, err := client.GenerateThumbnailSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnail", resp, "Failure sending request")
		return
	}

	result, err = client.GenerateThumbnailResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnail", resp, "Failure responding to request")
		return
	}

	return
}

// GenerateThumbnailPreparer prepares the GenerateThumbnail request.
func (client BaseClient) GenerateThumbnailPreparer(ctx context.Context, width int32, height int32, imageURL ImageURL, smartCropping *bool) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	queryParameters := map[string]interface{}{
		"height": autorest.Encode("query", height),
		"width":  autorest.Encode("query", width),
	}
	// smartCropping defaults to false when the caller passes nil.
	if smartCropping != nil {
		queryParameters["smartCropping"] = autorest.Encode("query", *smartCropping)
	} else {
		queryParameters["smartCropping"] = autorest.Encode("query", false)
	}

	// POST the image URL as JSON to {Endpoint}/vision/v3.1/generateThumbnail.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/generateThumbnail"),
		autorest.WithJSON(imageURL),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GenerateThumbnailSender sends the GenerateThumbnail request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) GenerateThumbnailSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GenerateThumbnailResponder handles the response to the GenerateThumbnail request. The method always
// closes the http.Response Body.
func (client BaseClient) GenerateThumbnailResponder(resp *http.Response) (result ReadCloser, err error) {
	// The thumbnail is returned as a binary stream: hand the response body to the
	// caller (no ByClosing decorator here — the caller must close result.Value).
	result.Value = &resp.Body
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK))
	result.Response = autorest.Response{Response: resp}
	return
}

// GenerateThumbnailInStream this operation generates a thumbnail image with the user-specified width and height. By
// default, the service analyzes the image, identifies the region of interest (ROI), and generates smart cropping
// coordinates based on the ROI. Smart cropping helps when you specify an aspect ratio that differs from that of the
// input image.
// A successful response contains the thumbnail image binary. If the request failed, the response contains an error
// code and a message to help determine what went wrong.
// Upon failure, the error code and an error message are returned. The error code could be one of InvalidImageUrl,
// InvalidImageFormat, InvalidImageSize, InvalidThumbnailSize, NotSupportedImage, FailedToProcess, Timeout, or
// InternalServerError.
// Parameters:
// width - width of the thumbnail, in pixels. It must be between 1 and 1024. Recommended minimum of 50.
// height - height of the thumbnail, in pixels. It must be between 1 and 1024. Recommended minimum of 50.
// imageParameter - an image stream.
// smartCropping - boolean flag for enabling smart cropping.
func (client BaseClient) GenerateThumbnailInStream(ctx context.Context, width int32, height int32, imageParameter io.ReadCloser, smartCropping *bool) (result ReadCloser, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.GenerateThumbnailInStream")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Enforce the documented 1..1024 pixel bounds client-side before issuing a network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: width,
			Constraints: []validation.Constraint{{Target: "width", Name: validation.InclusiveMaximum, Rule: int64(1024), Chain: nil},
				{Target: "width", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil}}},
		{TargetValue: height,
			Constraints: []validation.Constraint{{Target: "height", Name: validation.InclusiveMaximum, Rule: int64(1024), Chain: nil},
				{Target: "height", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "GenerateThumbnailInStream", err.Error())
	}

	req, err := client.GenerateThumbnailInStreamPreparer(ctx, width, height, imageParameter, smartCropping)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnailInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.GenerateThumbnailInStreamSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnailInStream", resp, "Failure sending request")
		return
	}

	result, err = client.GenerateThumbnailInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GenerateThumbnailInStream", resp, "Failure responding to request")
		return
	}

	return
}

// GenerateThumbnailInStreamPreparer prepares the GenerateThumbnailInStream request.
func (client BaseClient) GenerateThumbnailInStreamPreparer(ctx context.Context, width int32, height int32, imageParameter io.ReadCloser, smartCropping *bool) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	queryParameters := map[string]interface{}{
		"height": autorest.Encode("query", height),
		"width":  autorest.Encode("query", width),
	}
	// smartCropping defaults to false when the caller passes nil.
	if smartCropping != nil {
		queryParameters["smartCropping"] = autorest.Encode("query", *smartCropping)
	} else {
		queryParameters["smartCropping"] = autorest.Encode("query", false)
	}

	// POST the raw image bytes to {Endpoint}/vision/v3.1/generateThumbnail.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/octet-stream"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/generateThumbnail"),
		autorest.WithFile(imageParameter),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GenerateThumbnailInStreamSender sends the GenerateThumbnailInStream request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) GenerateThumbnailInStreamSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GenerateThumbnailInStreamResponder handles the response to the GenerateThumbnailInStream request. The method always
// closes the http.Response Body.
func (client BaseClient) GenerateThumbnailInStreamResponder(resp *http.Response) (result ReadCloser, err error) {
	// The thumbnail is returned as a binary stream: hand the response body to the
	// caller (no ByClosing decorator here — the caller must close result.Value).
	result.Value = &resp.Body
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK))
	result.Response = autorest.Response{Response: resp}
	return
}

// GetAreaOfInterest this operation returns a bounding box around the most important area of the image.
// A successful response will be returned in JSON. If the request failed, the response contains an error code and a
// message to help determine what went wrong.
// Upon failure, the error code and an error message are returned. The error code could be one of InvalidImageUrl,
// InvalidImageFormat, InvalidImageSize, NotSupportedImage, FailedToProcess, Timeout, or InternalServerError.
// Parameters:
// imageURL - a JSON document with a URL pointing to the image that is to be analyzed.
func (client BaseClient) GetAreaOfInterest(ctx context.Context, imageURL ImageURL) (result AreaOfInterestResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.GetAreaOfInterest")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The image URL is required; fail fast before issuing a network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "GetAreaOfInterest", err.Error())
	}

	req, err := client.GetAreaOfInterestPreparer(ctx, imageURL)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterest", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetAreaOfInterestSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterest", resp, "Failure sending request")
		return
	}

	result, err = client.GetAreaOfInterestResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterest", resp, "Failure responding to request")
		return
	}

	return
}

// GetAreaOfInterestPreparer prepares the GetAreaOfInterest request.
func (client BaseClient) GetAreaOfInterestPreparer(ctx context.Context, imageURL ImageURL) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	// POST the image URL as JSON to {Endpoint}/vision/v3.1/areaOfInterest.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/areaOfInterest"),
		autorest.WithJSON(imageURL))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetAreaOfInterestSender sends the GetAreaOfInterest request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) GetAreaOfInterestSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GetAreaOfInterestResponder handles the response to the GetAreaOfInterest request. The method always
// closes the http.Response Body.
func (client BaseClient) GetAreaOfInterestResponder(resp *http.Response) (result AreaOfInterestResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Always attach the raw HTTP response, even when decoding failed.
	result.Response = autorest.Response{Response: resp}
	return
}

// GetAreaOfInterestInStream this operation returns a bounding box around the most important area of the image.
// A successful response will be returned in JSON. If the request failed, the response contains an error code and a
// message to help determine what went wrong.
// Upon failure, the error code and an error message are returned. The error code could be one of InvalidImageUrl,
// InvalidImageFormat, InvalidImageSize, NotSupportedImage, FailedToProcess, Timeout, or InternalServerError.
// Parameters:
// imageParameter - an image stream.
func (client BaseClient) GetAreaOfInterestInStream(ctx context.Context, imageParameter io.ReadCloser) (result AreaOfInterestResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.GetAreaOfInterestInStream")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.GetAreaOfInterestInStreamPreparer(ctx, imageParameter)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterestInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetAreaOfInterestInStreamSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterestInStream", resp, "Failure sending request")
		return
	}

	result, err = client.GetAreaOfInterestInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetAreaOfInterestInStream", resp, "Failure responding to request")
		return
	}

	return
}

// GetAreaOfInterestInStreamPreparer prepares the GetAreaOfInterestInStream request.
func (client BaseClient) GetAreaOfInterestInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	// POST the raw image bytes to {Endpoint}/vision/v3.1/areaOfInterest.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/octet-stream"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/areaOfInterest"),
		autorest.WithFile(imageParameter))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetAreaOfInterestInStreamSender sends the GetAreaOfInterestInStream request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) GetAreaOfInterestInStreamSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GetAreaOfInterestInStreamResponder handles the response to the GetAreaOfInterestInStream request. The method always
// closes the http.Response Body.
func (client BaseClient) GetAreaOfInterestInStreamResponder(resp *http.Response) (result AreaOfInterestResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	// Always attach the raw HTTP response, even when decoding failed.
	result.Response = autorest.Response{Response: resp}
	return
}

// GetReadResult this interface is used for getting OCR results of Read operation. The URL to this interface should be
// retrieved from 'Operation-Location' field returned from Read interface.
// Parameters:
// operationID - id of read operation returned in the response of the 'Read' interface.
func (client BaseClient) GetReadResult(ctx context.Context, operationID uuid.UUID) (result ReadOperationResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.GetReadResult")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.GetReadResultPreparer(ctx, operationID)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetReadResult", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetReadResultSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetReadResult", resp, "Failure sending request")
		return
	}

	result, err = client.GetReadResultResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "GetReadResult", resp, "Failure responding to request")
		return
	}

	return
}

// GetReadResultPreparer prepares the GetReadResult request.
1206func (client BaseClient) GetReadResultPreparer(ctx context.Context, operationID uuid.UUID) (*http.Request, error) { 1207 urlParameters := map[string]interface{}{ 1208 "Endpoint": client.Endpoint, 1209 } 1210 1211 pathParameters := map[string]interface{}{ 1212 "operationId": autorest.Encode("path", operationID), 1213 } 1214 1215 preparer := autorest.CreatePreparer( 1216 autorest.AsGet(), 1217 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 1218 autorest.WithPathParameters("/read/analyzeResults/{operationId}", pathParameters)) 1219 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 1220} 1221 1222// GetReadResultSender sends the GetReadResult request. The method will close the 1223// http.Response Body if it receives an error. 1224func (client BaseClient) GetReadResultSender(req *http.Request) (*http.Response, error) { 1225 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 1226} 1227 1228// GetReadResultResponder handles the response to the GetReadResult request. The method always 1229// closes the http.Response Body. 1230func (client BaseClient) GetReadResultResponder(resp *http.Response) (result ReadOperationResult, err error) { 1231 err = autorest.Respond( 1232 resp, 1233 azure.WithErrorUnlessStatusCode(http.StatusOK), 1234 autorest.ByUnmarshallingJSON(&result), 1235 autorest.ByClosing()) 1236 result.Response = autorest.Response{Response: resp} 1237 return 1238} 1239 1240// ListModels this operation returns the list of domain-specific models that are supported by the Computer Vision API. 1241// Currently, the API supports following domain-specific models: celebrity recognizer, landmark recognizer. 1242// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 1243// message to help understand what went wrong. 
func (client BaseClient) ListModels(ctx context.Context) (result ListModelsResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.ListModels")
		defer func() {
			// Report the final status code (-1 when no response was received).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.ListModelsPreparer(ctx)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ListModels", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListModelsSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ListModels", resp, "Failure sending request")
		return
	}

	result, err = client.ListModelsResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ListModels", resp, "Failure responding to request")
		return
	}

	return
}

// ListModelsPreparer prepares the ListModels request.
func (client BaseClient) ListModelsPreparer(ctx context.Context) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	// GET {Endpoint}/vision/v3.1/models — no query or body parameters.
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/models"))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ListModelsSender sends the ListModels request. The method will close the
// http.Response Body if it receives an error.
1292func (client BaseClient) ListModelsSender(req *http.Request) (*http.Response, error) { 1293 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 1294} 1295 1296// ListModelsResponder handles the response to the ListModels request. The method always 1297// closes the http.Response Body. 1298func (client BaseClient) ListModelsResponder(resp *http.Response) (result ListModelsResult, err error) { 1299 err = autorest.Respond( 1300 resp, 1301 azure.WithErrorUnlessStatusCode(http.StatusOK), 1302 autorest.ByUnmarshallingJSON(&result), 1303 autorest.ByClosing()) 1304 result.Response = autorest.Response{Response: resp} 1305 return 1306} 1307 1308// Read use this interface to get the result of a Read operation, employing the state-of-the-art Optical Character 1309// Recognition (OCR) algorithms optimized for text-heavy documents. When you use the Read interface, the response 1310// contains a field called 'Operation-Location'. The 'Operation-Location' field contains the URL that you must use for 1311// your 'GetReadResult' operation to access OCR results. 1312// Parameters: 1313// imageURL - a JSON document with a URL pointing to the image that is to be analyzed. 1314// language - the BCP-47 language code of the text in the document. Currently, only English ('en'), Dutch 1315// (‘nl’), French (‘fr’), German (‘de’), Italian (‘it’), Portuguese (‘pt), and Spanish ('es') are supported. 1316// Read supports auto language identification and multi-language documents, so only provide a language code if 1317// you would like to force the documented to be processed as that specific language. 
func (client BaseClient) Read(ctx context.Context, imageURL ImageURL, language OcrDetectionLanguage) (result autorest.Response, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.Read")
		defer func() {
			// Report the final status code (-1 when no response was received).
			// Read is a long-running operation: its result is a bare autorest.Response,
			// so the status code is read directly off result.Response.
			sc := -1
			if result.Response != nil {
				sc = result.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// The image URL is required; fail fast before issuing a network call.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "Read", err.Error())
	}

	req, err := client.ReadPreparer(ctx, imageURL, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "Read", nil, "Failure preparing request")
		return
	}

	resp, err := client.ReadSender(req)
	if err != nil {
		result.Response = resp
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "Read", resp, "Failure sending request")
		return
	}

	result, err = client.ReadResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "Read", resp, "Failure responding to request")
		return
	}

	return
}

// ReadPreparer prepares the Read request.
func (client BaseClient) ReadPreparer(ctx context.Context, imageURL ImageURL, language OcrDetectionLanguage) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	queryParameters := map[string]interface{}{}
	if len(string(language)) > 0 {
		queryParameters["language"] = autorest.Encode("query", language)
	} else {
		// Default documented by the operation: process the document as English.
		queryParameters["language"] = autorest.Encode("query", "en")
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/read/analyze"),
		autorest.WithJSON(imageURL),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ReadSender sends the Read request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) ReadSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// ReadResponder handles the response to the Read request. The method always
// closes the http.Response Body.
func (client BaseClient) ReadResponder(resp *http.Response) (result autorest.Response, err error) {
	err = autorest.Respond(
		resp,
		// 202 Accepted is the normal outcome: results are fetched later via the
		// 'Operation-Location' URL returned by the service.
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
		autorest.ByClosing())
	result.Response = resp
	return
}

// ReadInStream use this interface to get the result of a Read operation, employing the state-of-the-art Optical
// Character Recognition (OCR) algorithms optimized for text-heavy documents. When you use the Read interface, the
// response contains a field called 'Operation-Location'. The 'Operation-Location' field contains the URL that you must
// use for your 'GetReadResult' operation to access OCR results.
// Parameters:
// imageParameter - an image stream.
// language - the BCP-47 language code of the text in the document. Currently, only English ('en'), Dutch
// ('nl'), French ('fr'), German ('de'), Italian ('it'), Portuguese ('pt'), and Spanish ('es') are supported.
// Read supports auto language identification and multi-language documents, so only provide a language code if
// you would like to force the document to be processed as that specific language.
func (client BaseClient) ReadInStream(ctx context.Context, imageParameter io.ReadCloser, language OcrDetectionLanguage) (result autorest.Response, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.ReadInStream")
		defer func() {
			// -1 records "no HTTP status" when the request never produced a response.
			sc := -1
			if result.Response != nil {
				sc = result.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.ReadInStreamPreparer(ctx, imageParameter, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ReadInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.ReadInStreamSender(req)
	if err != nil {
		result.Response = resp
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ReadInStream", resp, "Failure sending request")
		return
	}

	result, err = client.ReadInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "ReadInStream", resp, "Failure responding to request")
		return
	}

	return
}

// ReadInStreamPreparer prepares the ReadInStream request.
1441func (client BaseClient) ReadInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser, language OcrDetectionLanguage) (*http.Request, error) { 1442 urlParameters := map[string]interface{}{ 1443 "Endpoint": client.Endpoint, 1444 } 1445 1446 queryParameters := map[string]interface{}{} 1447 if len(string(language)) > 0 { 1448 queryParameters["language"] = autorest.Encode("query", language) 1449 } else { 1450 queryParameters["language"] = autorest.Encode("query", "en") 1451 } 1452 1453 preparer := autorest.CreatePreparer( 1454 autorest.AsContentType("application/octet-stream"), 1455 autorest.AsPost(), 1456 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 1457 autorest.WithPath("/read/analyze"), 1458 autorest.WithFile(imageParameter), 1459 autorest.WithQueryParameters(queryParameters)) 1460 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 1461} 1462 1463// ReadInStreamSender sends the ReadInStream request. The method will close the 1464// http.Response Body if it receives an error. 1465func (client BaseClient) ReadInStreamSender(req *http.Request) (*http.Response, error) { 1466 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 1467} 1468 1469// ReadInStreamResponder handles the response to the ReadInStream request. The method always 1470// closes the http.Response Body. 1471func (client BaseClient) ReadInStreamResponder(resp *http.Response) (result autorest.Response, err error) { 1472 err = autorest.Respond( 1473 resp, 1474 azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), 1475 autorest.ByClosing()) 1476 result.Response = resp 1477 return 1478} 1479 1480// RecognizePrintedText optical Character Recognition (OCR) detects text in an image and extracts the recognized 1481// characters into a machine-usable character stream. 1482// Upon success, the OCR results will be returned. 
// Upon failure, the error code together with an error message will be returned. The error code can be one of
// InvalidImageUrl, InvalidImageFormat, InvalidImageSize, NotSupportedImage, NotSupportedLanguage, or
// InternalServerError.
// Parameters:
// detectOrientation - whether to detect the text orientation in the image. With detectOrientation=true the OCR
// service tries to detect the image orientation and correct it before further processing (e.g. if it's
// upside-down).
// imageURL - a JSON document with a URL pointing to the image that is to be analyzed.
// language - the BCP-47 language code of the text to be detected in the image. The default value is 'unk'.
func (client BaseClient) RecognizePrintedText(ctx context.Context, detectOrientation bool, imageURL ImageURL, language OcrLanguages) (result OcrResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.RecognizePrintedText")
		defer func() {
			// -1 records "no HTTP status" when the request never produced a response.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// imageURL.URL is required; reject locally before any network round trip.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "RecognizePrintedText", err.Error())
	}

	req, err := client.RecognizePrintedTextPreparer(ctx, detectOrientation, imageURL, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedText", nil, "Failure preparing request")
		return
	}

	resp, err := client.RecognizePrintedTextSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedText", resp, "Failure sending request")
		return
	}

	result, err = client.RecognizePrintedTextResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedText", resp, "Failure responding to request")
		return
	}

	return
}

// RecognizePrintedTextPreparer prepares the RecognizePrintedText request.
func (client BaseClient) RecognizePrintedTextPreparer(ctx context.Context, detectOrientation bool, imageURL ImageURL, language OcrLanguages) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	queryParameters := map[string]interface{}{
		"detectOrientation": autorest.Encode("query", detectOrientation),
	}
	if len(string(language)) > 0 {
		queryParameters["language"] = autorest.Encode("query", language)
	} else {
		// Default documented by the operation: 'unk' (unknown language).
		queryParameters["language"] = autorest.Encode("query", "unk")
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/ocr"),
		autorest.WithJSON(imageURL),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// RecognizePrintedTextSender sends the RecognizePrintedText request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) RecognizePrintedTextSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// RecognizePrintedTextResponder handles the response to the RecognizePrintedText request. The method always
// closes the http.Response Body.
1564func (client BaseClient) RecognizePrintedTextResponder(resp *http.Response) (result OcrResult, err error) { 1565 err = autorest.Respond( 1566 resp, 1567 azure.WithErrorUnlessStatusCode(http.StatusOK), 1568 autorest.ByUnmarshallingJSON(&result), 1569 autorest.ByClosing()) 1570 result.Response = autorest.Response{Response: resp} 1571 return 1572} 1573 1574// RecognizePrintedTextInStream optical Character Recognition (OCR) detects text in an image and extracts the 1575// recognized characters into a machine-usable character stream. 1576// Upon success, the OCR results will be returned. 1577// Upon failure, the error code together with an error message will be returned. The error code can be one of 1578// InvalidImageUrl, InvalidImageFormat, InvalidImageSize, NotSupportedImage, NotSupportedLanguage, or 1579// InternalServerError. 1580// Parameters: 1581// detectOrientation - whether detect the text orientation in the image. With detectOrientation=true the OCR 1582// service tries to detect the image orientation and correct it before further processing (e.g. if it's 1583// upside-down). 1584// imageParameter - an image stream. 1585// language - the BCP-47 language code of the text to be detected in the image. The default value is 'unk'. 
func (client BaseClient) RecognizePrintedTextInStream(ctx context.Context, detectOrientation bool, imageParameter io.ReadCloser, language OcrLanguages) (result OcrResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.RecognizePrintedTextInStream")
		defer func() {
			// -1 records "no HTTP status" when the request never produced a response.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.RecognizePrintedTextInStreamPreparer(ctx, detectOrientation, imageParameter, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedTextInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.RecognizePrintedTextInStreamSender(req)
	if err != nil {
		// Keep the raw response (may be nil) so callers and the tracing defer can inspect it.
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedTextInStream", resp, "Failure sending request")
		return
	}

	result, err = client.RecognizePrintedTextInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "RecognizePrintedTextInStream", resp, "Failure responding to request")
		return
	}

	return
}

// RecognizePrintedTextInStreamPreparer prepares the RecognizePrintedTextInStream request.
1620func (client BaseClient) RecognizePrintedTextInStreamPreparer(ctx context.Context, detectOrientation bool, imageParameter io.ReadCloser, language OcrLanguages) (*http.Request, error) { 1621 urlParameters := map[string]interface{}{ 1622 "Endpoint": client.Endpoint, 1623 } 1624 1625 queryParameters := map[string]interface{}{ 1626 "detectOrientation": autorest.Encode("query", detectOrientation), 1627 } 1628 if len(string(language)) > 0 { 1629 queryParameters["language"] = autorest.Encode("query", language) 1630 } else { 1631 queryParameters["language"] = autorest.Encode("query", "unk") 1632 } 1633 1634 preparer := autorest.CreatePreparer( 1635 autorest.AsContentType("application/octet-stream"), 1636 autorest.AsPost(), 1637 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 1638 autorest.WithPath("/ocr"), 1639 autorest.WithFile(imageParameter), 1640 autorest.WithQueryParameters(queryParameters)) 1641 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 1642} 1643 1644// RecognizePrintedTextInStreamSender sends the RecognizePrintedTextInStream request. The method will close the 1645// http.Response Body if it receives an error. 1646func (client BaseClient) RecognizePrintedTextInStreamSender(req *http.Request) (*http.Response, error) { 1647 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 1648} 1649 1650// RecognizePrintedTextInStreamResponder handles the response to the RecognizePrintedTextInStream request. The method always 1651// closes the http.Response Body. 
1652func (client BaseClient) RecognizePrintedTextInStreamResponder(resp *http.Response) (result OcrResult, err error) { 1653 err = autorest.Respond( 1654 resp, 1655 azure.WithErrorUnlessStatusCode(http.StatusOK), 1656 autorest.ByUnmarshallingJSON(&result), 1657 autorest.ByClosing()) 1658 result.Response = autorest.Response{Response: resp} 1659 return 1660} 1661 1662// TagImage this operation generates a list of words, or tags, that are relevant to the content of the supplied image. 1663// The Computer Vision API can return tags based on objects, living beings, scenery or actions found in images. Unlike 1664// categories, tags are not organized according to a hierarchical classification system, but correspond to image 1665// content. Tags may contain hints to avoid ambiguity or provide context, for example the tag "ascomycete" may be 1666// accompanied by the hint "fungus". 1667// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL. 1668// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a 1669// message to help understand what went wrong. 1670// Parameters: 1671// imageURL - a JSON document with a URL pointing to the image that is to be analyzed. 1672// language - the desired language for output generation. If this parameter is not specified, the default value 1673// is "en".Supported languages:en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese, 1674// zh - Simplified Chinese. 
func (client BaseClient) TagImage(ctx context.Context, imageURL ImageURL, language string) (result TagResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.TagImage")
		defer func() {
			// -1 records "no HTTP status" when the request never produced a response.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// imageURL.URL is required; reject locally before any network round trip.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: imageURL,
			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("computervision.BaseClient", "TagImage", err.Error())
	}

	req, err := client.TagImagePreparer(ctx, imageURL, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImage", nil, "Failure preparing request")
		return
	}

	resp, err := client.TagImageSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImage", resp, "Failure sending request")
		return
	}

	result, err = client.TagImageResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImage", resp, "Failure responding to request")
		return
	}

	return
}

// TagImagePreparer prepares the TagImage request.
1715func (client BaseClient) TagImagePreparer(ctx context.Context, imageURL ImageURL, language string) (*http.Request, error) { 1716 urlParameters := map[string]interface{}{ 1717 "Endpoint": client.Endpoint, 1718 } 1719 1720 queryParameters := map[string]interface{}{} 1721 if len(string(language)) > 0 { 1722 queryParameters["language"] = autorest.Encode("query", language) 1723 } else { 1724 queryParameters["language"] = autorest.Encode("query", "en") 1725 } 1726 1727 preparer := autorest.CreatePreparer( 1728 autorest.AsContentType("application/json; charset=utf-8"), 1729 autorest.AsPost(), 1730 autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters), 1731 autorest.WithPath("/tag"), 1732 autorest.WithJSON(imageURL), 1733 autorest.WithQueryParameters(queryParameters)) 1734 return preparer.Prepare((&http.Request{}).WithContext(ctx)) 1735} 1736 1737// TagImageSender sends the TagImage request. The method will close the 1738// http.Response Body if it receives an error. 1739func (client BaseClient) TagImageSender(req *http.Request) (*http.Response, error) { 1740 return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) 1741} 1742 1743// TagImageResponder handles the response to the TagImage request. The method always 1744// closes the http.Response Body. 1745func (client BaseClient) TagImageResponder(resp *http.Response) (result TagResult, err error) { 1746 err = autorest.Respond( 1747 resp, 1748 azure.WithErrorUnlessStatusCode(http.StatusOK), 1749 autorest.ByUnmarshallingJSON(&result), 1750 autorest.ByClosing()) 1751 result.Response = autorest.Response{Response: resp} 1752 return 1753} 1754 1755// TagImageInStream this operation generates a list of words, or tags, that are relevant to the content of the supplied 1756// image. The Computer Vision API can return tags based on objects, living beings, scenery or actions found in images. 
// Unlike categories, tags are not organized according to a hierarchical classification system, but correspond to image
// content. Tags may contain hints to avoid ambiguity or provide context, for example the tag "ascomycete" may be
// accompanied by the hint "fungus".
// Two input methods are supported -- (1) Uploading an image or (2) specifying an image URL.
// A successful response will be returned in JSON. If the request failed, the response will contain an error code and a
// message to help understand what went wrong.
// Parameters:
// imageParameter - an image stream.
// language - the desired language for output generation. If this parameter is not specified, the default value
// is "en". Supported languages: en - English, Default. es - Spanish, ja - Japanese, pt - Portuguese,
// zh - Simplified Chinese.
func (client BaseClient) TagImageInStream(ctx context.Context, imageParameter io.ReadCloser, language string) (result TagResult, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/BaseClient.TagImageInStream")
		defer func() {
			// -1 records "no HTTP status" when the request never produced a response.
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.TagImageInStreamPreparer(ctx, imageParameter, language)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImageInStream", nil, "Failure preparing request")
		return
	}

	resp, err := client.TagImageInStreamSender(req)
	if err != nil {
		// Keep the raw response (may be nil) so callers and the tracing defer can inspect it.
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImageInStream", resp, "Failure sending request")
		return
	}

	result, err = client.TagImageInStreamResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "computervision.BaseClient", "TagImageInStream", resp, "Failure responding to request")
		return
	}

	return
}

// TagImageInStreamPreparer prepares the TagImageInStream request.
func (client BaseClient) TagImageInStreamPreparer(ctx context.Context, imageParameter io.ReadCloser, language string) (*http.Request, error) {
	urlParameters := map[string]interface{}{
		"Endpoint": client.Endpoint,
	}

	queryParameters := map[string]interface{}{}
	if len(string(language)) > 0 {
		queryParameters["language"] = autorest.Encode("query", language)
	} else {
		// Default documented by the operation: English output.
		queryParameters["language"] = autorest.Encode("query", "en")
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/octet-stream"),
		autorest.AsPost(),
		autorest.WithCustomBaseURL("{Endpoint}/vision/v3.1", urlParameters),
		autorest.WithPath("/tag"),
		autorest.WithFile(imageParameter),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// TagImageInStreamSender sends the TagImageInStream request. The method will close the
// http.Response Body if it receives an error.
func (client BaseClient) TagImageInStreamSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// TagImageInStreamResponder handles the response to the TagImageInStream request. The method always
// closes the http.Response Body.
func (client BaseClient) TagImageInStreamResponder(resp *http.Response) (result TagResult, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}