// Code generated by smithy-go-codegen DO NOT EDIT.

package machinelearning

import (
	"context"
	awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
	"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
	"github.com/aws/smithy-go/middleware"
	smithyhttp "github.com/aws/smithy-go/transport/http"
)

// Generates predictions for a group of observations. The observations to process
// exist in one or more data files referenced by a DataSource. This operation
// creates a new BatchPrediction, and uses an MLModel and the data files referenced
// by the DataSource as information sources. CreateBatchPrediction is an
// asynchronous operation. In response to CreateBatchPrediction, Amazon Machine
// Learning (Amazon ML) immediately returns and sets the BatchPrediction status to
// PENDING. After the BatchPrediction completes, Amazon ML sets the status to
// COMPLETED. You can poll for status updates by using the GetBatchPrediction
// operation and checking the Status parameter of the result. After the COMPLETED
// status appears, the results are available in the location specified by the
// OutputUri parameter.
func (c *Client) CreateBatchPrediction(ctx context.Context, params *CreateBatchPredictionInput, optFns ...func(*Options)) (*CreateBatchPredictionOutput, error) {
	if params == nil {
		params = &CreateBatchPredictionInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "CreateBatchPrediction", params, optFns, addOperationCreateBatchPredictionMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*CreateBatchPredictionOutput)
	out.ResultMetadata = metadata
	return out, nil
}

type CreateBatchPredictionInput struct {

	// The ID of the DataSource that points to the group of observations to predict.
	//
	// This member is required.
	BatchPredictionDataSourceId *string

	// A user-supplied ID that uniquely identifies the BatchPrediction.
	//
	// This member is required.
	BatchPredictionId *string

	// The ID of the MLModel that will generate predictions for the group of
	// observations.
	//
	// This member is required.
	MLModelId *string

	// The location of an Amazon Simple Storage Service (Amazon S3) bucket or directory
	// to store the batch prediction results. The following substrings are not allowed
	// in the s3 key portion of the outputURI field: ':', '//', '/./', '/../'. Amazon
	// ML needs permissions to store and retrieve the logs on your behalf. For
	// information about how to set permissions, see the Amazon Machine Learning
	// Developer Guide (https://docs.aws.amazon.com/machine-learning/latest/dg).
	//
	// This member is required.
	OutputUri *string

	// A user-supplied name or description of the BatchPrediction. BatchPredictionName
	// can only use the UTF-8 character set.
	BatchPredictionName *string
}

// Represents the output of a CreateBatchPrediction operation, and is an
// acknowledgement that Amazon ML received the request. The CreateBatchPrediction
// operation is asynchronous. You can poll for status updates by using the
// GetBatchPrediction operation and checking the Status parameter of the result.
type CreateBatchPredictionOutput struct {

	// A user-supplied ID that uniquely identifies the BatchPrediction. This value is
	// identical to the value of the BatchPredictionId in the request.
	BatchPredictionId *string

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}

func addOperationCreateBatchPredictionMiddlewares(stack *middleware.Stack, options Options) (err error) {
	err = stack.Serialize.Add(&awsAwsjson11_serializeOpCreateBatchPrediction{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpCreateBatchPrediction{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = addOpCreateBatchPredictionValidationMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opCreateBatchPrediction(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}

func newServiceMetadataMiddleware_opCreateBatchPrediction(region string) *awsmiddleware.RegisterServiceMetadata {
	return &awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "machinelearning",
		OperationName: "CreateBatchPrediction",
	}
}
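
// Illustrative usage sketch (not generated code): one way a caller might start a
// batch prediction with this client and then poll GetBatchPrediction, as the
// operation documentation above describes, until the reported Status reaches
// COMPLETED (or FAILED). The shared-config loading, the IDs, the S3 output URI,
// and the 30-second polling interval are placeholder assumptions for the example
// only, not values defined by this package.
//
//	package main
//
//	import (
//		"context"
//		"fmt"
//		"log"
//		"time"
//
//		"github.com/aws/aws-sdk-go-v2/aws"
//		"github.com/aws/aws-sdk-go-v2/config"
//		"github.com/aws/aws-sdk-go-v2/service/machinelearning"
//	)
//
//	func main() {
//		ctx := context.Background()
//
//		// Load region and credentials from the environment / shared config.
//		cfg, err := config.LoadDefaultConfig(ctx)
//		if err != nil {
//			log.Fatal(err)
//		}
//		client := machinelearning.NewFromConfig(cfg)
//
//		// Start the asynchronous batch prediction job (placeholder IDs and URI).
//		_, err = client.CreateBatchPrediction(ctx, &machinelearning.CreateBatchPredictionInput{
//			BatchPredictionId:           aws.String("example-bp-id"),
//			BatchPredictionDataSourceId: aws.String("example-ds-id"),
//			MLModelId:                   aws.String("example-ml-model-id"),
//			OutputUri:                   aws.String("s3://example-bucket/batch-output/"),
//		})
//		if err != nil {
//			log.Fatal(err)
//		}
//
//		// Poll for status updates until the job leaves its pending/in-progress states.
//		for {
//			got, err := client.GetBatchPrediction(ctx, &machinelearning.GetBatchPredictionInput{
//				BatchPredictionId: aws.String("example-bp-id"),
//			})
//			if err != nil {
//				log.Fatal(err)
//			}
//			fmt.Println("status:", got.Status)
//			if s := string(got.Status); s == "COMPLETED" || s == "FAILED" {
//				break
//			}
//			time.Sleep(30 * time.Second)
//		}
//	}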