1// Code generated by protoc-gen-go. DO NOT EDIT.
2// source: google/cloud/automl/v1beta1/io.proto
3
4package automl
5
6import (
7	fmt "fmt"
8	math "math"
9
10	proto "github.com/golang/protobuf/proto"
11	_ "google.golang.org/genproto/googleapis/api/annotations"
12)
13
14// Reference imports to suppress errors if they are not otherwise used.
15var _ = proto.Marshal
16var _ = fmt.Errorf
17var _ = math.Inf
18
19// This is a compile-time assertion to ensure that this generated file
20// is compatible with the proto package it is being compiled against.
21// A compilation error at this line likely means your copy of the
22// proto package needs to be updated.
23const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
24
25// Input configuration for ImportData Action.
26//
27// The format of input depends on dataset_metadata the Dataset into which
28// the import is happening has. As input source the
29// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source]
30// is expected, unless specified otherwise. Additionally any input .CSV file
31// by itself must be 100MB or smaller, unless specified otherwise.
32// If an "example" file (that is, image, video etc.) with identical content
33// (even if it had different GCS_FILE_PATH) is mentioned multiple times, then
34// its label, bounding boxes etc. are appended. The same file should be always
35// provided with the same ML_USE and GCS_FILE_PATH, if it is not, then
36// these values are nondeterministically selected from the given ones.
37//
38// The formats are represented in EBNF with commas being literal and with
39// non-terminal symbols defined near the end of this comment. The formats are:
40//
41//  *  For Image Classification:
42//         CSV file(s) with each line in format:
43//           ML_USE,GCS_FILE_PATH,LABEL,LABEL,...
44//           GCS_FILE_PATH leads to image of up to 30MB in size. Supported
45//           extensions: .JPEG, .GIF, .PNG, .WEBP, .BMP, .TIFF, .ICO
46//           For MULTICLASS classification type, at most one LABEL is allowed
47//           per image. If an image has not yet been labeled, then it should be
48//           mentioned just once with no LABEL.
49//         Some sample rows:
50//           TRAIN,gs://folder/image1.jpg,daisy
51//           TEST,gs://folder/image2.jpg,dandelion,tulip,rose
52//           UNASSIGNED,gs://folder/image3.jpg,daisy
53//           UNASSIGNED,gs://folder/image4.jpg
54//
55//  *  For Image Object Detection:
56//         CSV file(s) with each line in format:
57//           ML_USE,GCS_FILE_PATH,(LABEL,BOUNDING_BOX | ,,,,,,,)
58//           GCS_FILE_PATH leads to image of up to 30MB in size. Supported
59//           extensions: .JPEG, .GIF, .PNG.
60//           Each image is assumed to be exhaustively labeled. The minimum
61//           allowed BOUNDING_BOX edge length is 0.01, and no more than 500
62//           BOUNDING_BOX-es per image are allowed (one BOUNDING_BOX is defined
63//           per line). If an image has not yet been labeled, then it should be
64//           mentioned just once with no LABEL and the ",,,,,,," in place of the
65//           BOUNDING_BOX. For images which are known to not contain any
//           bounding boxes, they should be labeled explicitly as
67//           "NEGATIVE_IMAGE", followed by ",,,,,,," in place of the
68//           BOUNDING_BOX.
69//         Sample rows:
70//           TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,,
71//           TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,,
72//           UNASSIGNED,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3
73//           TEST,gs://folder/im3.png,,,,,,,,,
74//           TRAIN,gs://folder/im4.png,NEGATIVE_IMAGE,,,,,,,,,
75//
76//  *  For Video Classification:
77//         CSV file(s) with each line in format:
78//           ML_USE,GCS_FILE_PATH
79//           where ML_USE VALIDATE value should not be used. The GCS_FILE_PATH
80//           should lead to another .csv file which describes examples that have
81//           given ML_USE, using the following row format:
82//           GCS_FILE_PATH,(LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END | ,,)
83//           Here GCS_FILE_PATH leads to a video of up to 50GB in size and up
84//           to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
85//           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
86//           length of the video, and end has to be after the start. Any segment
87//           of a video which has one or more labels on it, is considered a
88//           hard negative for all other labels. Any segment with no labels on
89//           it is considered to be unknown. If a whole video is unknown, then
//           it should be mentioned just once with ",," in place of LABEL,
91//           TIME_SEGMENT_START,TIME_SEGMENT_END.
92//         Sample top level CSV file:
93//           TRAIN,gs://folder/train_videos.csv
94//           TEST,gs://folder/test_videos.csv
95//           UNASSIGNED,gs://folder/other_videos.csv
96//         Sample rows of a CSV file for a particular ML_USE:
97//           gs://folder/video1.avi,car,120,180.000021
98//           gs://folder/video1.avi,bike,150,180.000021
99//           gs://folder/vid2.avi,car,0,60.5
100//           gs://folder/vid3.avi,,,
101//
102//  *  For Video Object Tracking:
103//         CSV file(s) with each line in format:
104//           ML_USE,GCS_FILE_PATH
105//           where ML_USE VALIDATE value should not be used. The GCS_FILE_PATH
106//           should lead to another .csv file which describes examples that have
107//           given ML_USE, using one of the following row format:
108//           GCS_FILE_PATH,LABEL,[INSTANCE_ID],TIMESTAMP,BOUNDING_BOX
109//           or
110//           GCS_FILE_PATH,,,,,,,,,,
111//           Here GCS_FILE_PATH leads to a video of up to 50GB in size and up
112//           to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
113//           Providing INSTANCE_IDs can help to obtain a better model. When
114//           a specific labeled entity leaves the video frame, and shows up
115//           afterwards it is not required, albeit preferable, that the same
116//           INSTANCE_ID is given to it.
117//           TIMESTAMP must be within the length of the video, the
118//           BOUNDING_BOX is assumed to be drawn on the closest video's frame
119//           to the TIMESTAMP. Any mentioned by the TIMESTAMP frame is expected
120//           to be exhaustively labeled and no more than 500 BOUNDING_BOX-es per
121//           frame are allowed. If a whole video is unknown, then it should be
122//           mentioned just once with ",,,,,,,,,," in place of LABEL,
123//           [INSTANCE_ID],TIMESTAMP,BOUNDING_BOX.
124//         Sample top level CSV file:
125//           TRAIN,gs://folder/train_videos.csv
126//           TEST,gs://folder/test_videos.csv
127//           UNASSIGNED,gs://folder/other_videos.csv
128//         Seven sample rows of a CSV file for a particular ML_USE:
129//           gs://folder/video1.avi,car,1,12.10,0.8,0.8,0.9,0.8,0.9,0.9,0.8,0.9
130//           gs://folder/video1.avi,car,1,12.90,0.4,0.8,0.5,0.8,0.5,0.9,0.4,0.9
131//           gs://folder/video1.avi,car,2,12.10,.4,.2,.5,.2,.5,.3,.4,.3
132//           gs://folder/video1.avi,car,2,12.90,.8,.2,,,.9,.3,,
133//           gs://folder/video1.avi,bike,,12.50,.45,.45,,,.55,.55,,
134//           gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,,
135//           gs://folder/video2.avi,,,,,,,,,,,
136//  *  For Text Extraction:
137//         CSV file(s) with each line in format:
138//           ML_USE,GCS_FILE_PATH
139//           GCS_FILE_PATH leads to a .JSONL (that is, JSON Lines) file which
140//           either imports text in-line or as documents. Any given
141//           .JSONL file must be 100MB or smaller.
142//           The in-line .JSONL file contains, per line, a proto that wraps a
143//           TextSnippet proto (in json representation) followed by one or more
144//           AnnotationPayload protos (called annotations), which have
145//           display_name and text_extraction detail populated. The given text
146//           is expected to be annotated exhaustively, for example, if you look
147//           for animals and text contains "dolphin" that is not labeled, then
148//           "dolphin" is assumed to not be an animal. Any given text snippet
149//           content must be 10KB or smaller, and also be UTF-8 NFC encoded
150//           (ASCII already is).
151//           The document .JSONL file contains, per line, a proto that wraps a
152//           Document proto. The Document proto must have either document_text
153//           or input_config set. In document_text case, the Document proto may
154//           also contain the spatial information of the document, including
155//           layout, document dimension and page number. In input_config case,
156//           only PDF documents are supported now, and each document may be up
157//           to 2MB large. Currently, annotations on documents cannot be
158//           specified at import.
159//         Three sample CSV rows:
160//           TRAIN,gs://folder/file1.jsonl
161//           VALIDATE,gs://folder/file2.jsonl
162//           TEST,gs://folder/file3.jsonl
163//         Sample in-line JSON Lines file for entity extraction (presented here
164//         with artificial line breaks, but the only actual line break is
165//         denoted by \n).:
166//           {
167//             "document": {
168//               "document_text": {"content": "dog cat"}
169//               "layout": [
170//                 {
171//                   "text_segment": {
172//                     "start_offset": 0,
173//                     "end_offset": 3,
174//                   },
175//                   "page_number": 1,
176//                   "bounding_poly": {
177//                     "normalized_vertices": [
178//                       {"x": 0.1, "y": 0.1},
179//                       {"x": 0.1, "y": 0.3},
180//                       {"x": 0.3, "y": 0.3},
181//                       {"x": 0.3, "y": 0.1},
182//                     ],
183//                   },
184//                   "text_segment_type": TOKEN,
185//                 },
186//                 {
187//                   "text_segment": {
188//                     "start_offset": 4,
189//                     "end_offset": 7,
190//                   },
191//                   "page_number": 1,
192//                   "bounding_poly": {
193//                     "normalized_vertices": [
194//                       {"x": 0.4, "y": 0.1},
195//                       {"x": 0.4, "y": 0.3},
196//                       {"x": 0.8, "y": 0.3},
197//                       {"x": 0.8, "y": 0.1},
198//                     ],
199//                   },
200//                   "text_segment_type": TOKEN,
201//                 }
202//
203//               ],
204//               "document_dimensions": {
205//                 "width": 8.27,
206//                 "height": 11.69,
207//                 "unit": INCH,
208//               }
209//               "page_count": 1,
210//             },
211//             "annotations": [
212//               {
213//                 "display_name": "animal",
214//                 "text_extraction": {"text_segment": {"start_offset": 0,
215//                 "end_offset": 3}}
216//               },
217//               {
218//                 "display_name": "animal",
219//                 "text_extraction": {"text_segment": {"start_offset": 4,
220//                 "end_offset": 7}}
221//               }
222//             ],
223//           }\n
224//           {
225//              "text_snippet": {
226//                "content": "This dog is good."
227//              },
228//              "annotations": [
229//                {
230//                  "display_name": "animal",
231//                  "text_extraction": {
232//                    "text_segment": {"start_offset": 5, "end_offset": 8}
233//                  }
234//                }
235//              ]
236//           }
237//         Sample document JSON Lines file (presented here with artificial line
238//         breaks, but the only actual line break is denoted by \n).:
239//           {
240//             "document": {
241//               "input_config": {
242//                 "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
243//                 }
244//               }
245//             }
246//           }\n
247//           {
248//             "document": {
249//               "input_config": {
250//                 "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ]
251//                 }
252//               }
253//             }
254//           }
255//
256//  *  For Text Classification:
257//         CSV file(s) with each line in format:
258//           ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),LABEL,LABEL,...
259//           TEXT_SNIPPET and GCS_FILE_PATH are distinguished by a pattern. If
260//           the column content is a valid gcs file path, i.e. prefixed by
261//           "gs://", it will be treated as a GCS_FILE_PATH, else if the content
262//           is enclosed within double quotes (""), it is
263//           treated as a TEXT_SNIPPET. In the GCS_FILE_PATH case, the path
264//           must lead to a .txt file with UTF-8 encoding, for example,
265//           "gs://folder/content.txt", and the content in it is extracted
266//           as a text snippet. In TEXT_SNIPPET case, the column content
267//           excluding quotes is treated as to be imported text snippet. In
268//           both cases, the text snippet/file size must be within 128kB.
269//           Maximum 100 unique labels are allowed per CSV row.
270//         Sample rows:
271//           TRAIN,"They have bad food and very rude",RudeService,BadFood
272//           TRAIN,gs://folder/content.txt,SlowService
273//           TEST,"Typically always bad service there.",RudeService
274//           VALIDATE,"Stomach ache to go.",BadFood
275//
276//  *  For Text Sentiment:
277//         CSV file(s) with each line in format:
278//           ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),SENTIMENT
279//           TEXT_SNIPPET and GCS_FILE_PATH are distinguished by a pattern. If
280//           the column content is a valid gcs file path, that is, prefixed by
281//           "gs://", it is treated as a GCS_FILE_PATH, otherwise it is treated
282//           as a TEXT_SNIPPET. In the GCS_FILE_PATH case, the path
283//           must lead to a .txt file with UTF-8 encoding, for example,
284//           "gs://folder/content.txt", and the content in it is extracted
285//           as a text snippet. In TEXT_SNIPPET case, the column content itself
286//           is treated as to be imported text snippet. In both cases, the
287//           text snippet must be up to 500 characters long.
288//         Sample rows:
289//           TRAIN,"@freewrytin this is way too good for your product",2
290//           TRAIN,"I need this product so bad",3
291//           TEST,"Thank you for this product.",4
292//           VALIDATE,gs://folder/content.txt,2
293//
294//   *  For Tables:
295//         Either
296//         [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or
297//
298// [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source]
//         can be used. All inputs are concatenated into a single
300//
301// [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_name]
302//         For gcs_source:
303//           CSV file(s), where the first row of the first file is the header,
304//           containing unique column names. If the first row of a subsequent
305//           file is the same as the header, then it is also treated as a
306//           header. All other rows contain values for the corresponding
307//           columns.
308//           Each .CSV file by itself must be 10GB or smaller, and their total
309//           size must be 100GB or smaller.
310//           First three sample rows of a CSV file:
311//           "Id","First Name","Last Name","Dob","Addresses"
312//
313// "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
314//
315// "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]}
316//         For bigquery_source:
317//           An URI of a BigQuery table. The user data size of the BigQuery
318//           table must be 100GB or smaller.
319//         An imported table must have between 2 and 1,000 columns, inclusive,
320//         and between 1000 and 100,000,000 rows, inclusive. There are at most 5
321//         import data running in parallel.
322//  Definitions:
323//  ML_USE = "TRAIN" | "VALIDATE" | "TEST" | "UNASSIGNED"
324//           Describes how the given example (file) should be used for model
325//           training. "UNASSIGNED" can be used when user has no preference.
326//  GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/image1.png".
327//  LABEL = A display name of an object on an image, video etc., e.g. "dog".
328//          Must be up to 32 characters long and can consist only of ASCII
329//          Latin letters A-Z and a-z, underscores(_), and ASCII digits 0-9.
330//          For each label an AnnotationSpec is created which display_name
331//          becomes the label; AnnotationSpecs are given back in predictions.
332//  INSTANCE_ID = A positive integer that identifies a specific instance of a
333//                labeled entity on an example. Used e.g. to track two cars on
334//                a video while being able to tell apart which one is which.
335//  BOUNDING_BOX = VERTEX,VERTEX,VERTEX,VERTEX | VERTEX,,,VERTEX,,
336//                 A rectangle parallel to the frame of the example (image,
337//                 video). If 4 vertices are given they are connected by edges
338//                 in the order provided, if 2 are given they are recognized
339//                 as diagonally opposite vertices of the rectangle.
340//  VERTEX = COORDINATE,COORDINATE
341//           First coordinate is horizontal (x), the second is vertical (y).
342//  COORDINATE = A float in 0 to 1 range, relative to total length of
343//               image or video in given dimension. For fractions the
344//               leading non-decimal 0 can be omitted (i.e. 0.3 = .3).
345//               Point 0,0 is in top left.
346//  TIME_SEGMENT_START = TIME_OFFSET
347//                       Expresses a beginning, inclusive, of a time segment
348//                       within an example that has a time dimension
349//                       (e.g. video).
350//  TIME_SEGMENT_END = TIME_OFFSET
351//                     Expresses an end, exclusive, of a time segment within
352//                     an example that has a time dimension (e.g. video).
353//  TIME_OFFSET = A number of seconds as measured from the start of an
354//                example (e.g. video). Fractions are allowed, up to a
355//                microsecond precision. "inf" is allowed, and it means the end
356//                of the example.
357//  TEXT_SNIPPET = A content of a text snippet, UTF-8 encoded, enclosed within
358//                 double quotes ("").
359//  SENTIMENT = An integer between 0 and
360//              Dataset.text_sentiment_dataset_metadata.sentiment_max
361//              (inclusive). Describes the ordinal of the sentiment - higher
362//              value means a more positive sentiment. All the values are
363//              completely relative, i.e. neither 0 needs to mean a negative or
364//              neutral sentiment nor sentiment_max needs to mean a positive one
365//              - it is just required that 0 is the least positive sentiment
366//              in the data, and sentiment_max is the  most positive one.
367//              The SENTIMENT shouldn't be confused with "score" or "magnitude"
368//              from the previous Natural Language Sentiment Analysis API.
369//              All SENTIMENT values between 0 and sentiment_max must be
370//              represented in the imported data. On prediction the same 0 to
371//              sentiment_max range will be used. The difference between
372//              neighboring sentiment values needs not to be uniform, e.g. 1 and
373//              2 may be similar whereas the difference between 2 and 3 may be
374//              huge.
375//
376//  Errors:
377//  If any of the provided CSV files can't be parsed or if more than certain
378//  percent of CSV rows cannot be processed then the operation fails and
379//  nothing is imported. Regardless of overall success or failure the per-row
380//  failures, up to a certain count cap, is listed in
381//  Operation.metadata.partial_failures.
382//
type InputConfig struct {
	// The source of the input.
	//
	// Types that are valid to be assigned to Source:
	//	*InputConfig_GcsSource
	//	*InputConfig_BigquerySource
	Source isInputConfig_Source `protobuf_oneof:"source"`
	// Additional domain-specific parameters describing the semantic of the
	// imported data, any string must be up to 25000
	// characters long.
	//
	// *  For Tables:
	//    `schema_inference_version` - (integer) Required. The version of the
	//        algorithm that should be used for the initial inference of the
	//        schema (columns' DataTypes) of the table the data is being imported
	//        into. Allowed values: "1".
	Params map[string]string `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// XXX_* fields are internal bookkeeping used by the protobuf runtime;
	// they are excluded from JSON and must not be touched by user code.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
404
// Reset restores the message to its zero state.
func (m *InputConfig) Reset()         { *m = InputConfig{} }

// String renders the message in the proto compact text format.
func (m *InputConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks *InputConfig as a protobuf message.
func (*InputConfig) ProtoMessage()    {}

// Descriptor returns the compressed serialized file descriptor and the
// message's index path within it.
func (*InputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{0}
}
411
// XXX_Unmarshal deserializes the wire-format bytes b into m.
func (m *InputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_InputConfig.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *InputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_InputConfig.Merge(m, src)
}

// XXX_Size reports the wire-format size of m in bytes.
func (m *InputConfig) XXX_Size() int {
	return xxx_messageInfo_InputConfig.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields retained on m.
func (m *InputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_InputConfig.DiscardUnknown(m)
}
427
428var xxx_messageInfo_InputConfig proto.InternalMessageInfo
429
// isInputConfig_Source is the marker interface implemented by every valid
// wrapper type for the InputConfig "source" oneof.
type isInputConfig_Source interface {
	isInputConfig_Source()
}

// InputConfig_GcsSource wraps a GcsSource assignment of the source oneof.
type InputConfig_GcsSource struct {
	GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"`
}

// InputConfig_BigquerySource wraps a BigQuerySource assignment of the
// source oneof.
type InputConfig_BigquerySource struct {
	BigquerySource *BigQuerySource `protobuf:"bytes,3,opt,name=bigquery_source,json=bigquerySource,proto3,oneof"`
}

func (*InputConfig_GcsSource) isInputConfig_Source() {}

func (*InputConfig_BigquerySource) isInputConfig_Source() {}
445
446func (m *InputConfig) GetSource() isInputConfig_Source {
447	if m != nil {
448		return m.Source
449	}
450	return nil
451}
452
453func (m *InputConfig) GetGcsSource() *GcsSource {
454	if x, ok := m.GetSource().(*InputConfig_GcsSource); ok {
455		return x.GcsSource
456	}
457	return nil
458}
459
460func (m *InputConfig) GetBigquerySource() *BigQuerySource {
461	if x, ok := m.GetSource().(*InputConfig_BigquerySource); ok {
462		return x.BigquerySource
463	}
464	return nil
465}
466
467func (m *InputConfig) GetParams() map[string]string {
468	if m != nil {
469		return m.Params
470	}
471	return nil
472}
473
// XXX_OneofWrappers is for the internal use of the proto package.
// It enumerates the wrapper types of the source oneof so the runtime can
// marshal and unmarshal the field.
func (*InputConfig) XXX_OneofWrappers() []interface{} {
	return []interface{}{
		(*InputConfig_GcsSource)(nil),
		(*InputConfig_BigquerySource)(nil),
	}
}
481
482// Input configuration for BatchPredict Action.
483//
484// The format of input depends on the ML problem of the model used for
485// prediction. As input source the
486// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source]
487// is expected, unless specified otherwise.
488//
489// The formats are represented in EBNF with commas being literal and with
490// non-terminal symbols defined near the end of this comment. The formats
491// are:
492//
493//  *  For Image Classification:
494//         CSV file(s) with each line having just a single column:
495//           GCS_FILE_PATH
496//           which leads to image of up to 30MB in size. Supported
497//           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
498//           the Batch predict output.
499//         Three sample rows:
500//           gs://folder/image1.jpeg
501//           gs://folder/image2.gif
502//           gs://folder/image3.png
503//
504//  *  For Image Object Detection:
505//         CSV file(s) with each line having just a single column:
506//           GCS_FILE_PATH
507//           which leads to image of up to 30MB in size. Supported
508//           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
509//           the Batch predict output.
510//         Three sample rows:
511//           gs://folder/image1.jpeg
512//           gs://folder/image2.gif
513//           gs://folder/image3.png
514//  *  For Video Classification:
515//         CSV file(s) with each line in format:
516//           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
517//           GCS_FILE_PATH leads to video of up to 50GB in size and up to 3h
518//           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
519//           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
520//           length of the video, and end has to be after the start.
521//         Three sample rows:
522//           gs://folder/video1.mp4,10,40
523//           gs://folder/video1.mp4,20,60
524//           gs://folder/vid2.mov,0,inf
525//
526//  *  For Video Object Tracking:
527//         CSV file(s) with each line in format:
528//           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
529//           GCS_FILE_PATH leads to video of up to 50GB in size and up to 3h
530//           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
531//           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
532//           length of the video, and end has to be after the start.
533//         Three sample rows:
534//           gs://folder/video1.mp4,10,240
535//           gs://folder/video1.mp4,300,360
536//           gs://folder/vid2.mov,0,inf
537//  *  For Text Classification:
538//         CSV file(s) with each line having just a single column:
539//           GCS_FILE_PATH | TEXT_SNIPPET
//         Any given text file can have size up to 128kB.
541//         Any given text snippet content must have 60,000 characters or less.
542//         Three sample rows:
543//           gs://folder/text1.txt
544//           "Some text content to predict"
545//           gs://folder/text3.pdf
546//         Supported file extensions: .txt, .pdf
547//
548//  *  For Text Sentiment:
549//         CSV file(s) with each line having just a single column:
550//           GCS_FILE_PATH | TEXT_SNIPPET
//         Any given text file can have size up to 128kB.
552//         Any given text snippet content must have 500 characters or less.
553//         Three sample rows:
554//           gs://folder/text1.txt
555//           "Some text content to predict"
556//           gs://folder/text3.pdf
557//         Supported file extensions: .txt, .pdf
558//
559//  * For Text Extraction
560//         .JSONL (i.e. JSON Lines) file(s) which either provide text in-line or
561//         as documents (for a single BatchPredict call only one of the these
562//         formats may be used).
563//         The in-line .JSONL file(s) contain per line a proto that
564//           wraps a temporary user-assigned TextSnippet ID (string up to 2000
565//           characters long) called "id", a TextSnippet proto (in
566//           json representation) and zero or more TextFeature protos. Any given
567//           text snippet content must have 30,000 characters or less, and also
568//           be UTF-8 NFC encoded (ASCII already is). The IDs provided should be
569//           unique.
570//         The document .JSONL file(s) contain, per line, a proto that wraps a
571//           Document proto with input_config set. Only PDF documents are
572//           supported now, and each document must be up to 2MB large.
573//         Any given .JSONL file must be 100MB or smaller, and no more than 20
574//         files may be given.
575//         Sample in-line JSON Lines file (presented here with artificial line
576//         breaks, but the only actual line break is denoted by \n):
577//           {
578//             "id": "my_first_id",
579//             "text_snippet": { "content": "dog car cat"},
580//             "text_features": [
581//               {
582//                 "text_segment": {"start_offset": 4, "end_offset": 6},
583//                 "structural_type": PARAGRAPH,
584//                 "bounding_poly": {
585//                   "normalized_vertices": [
586//                     {"x": 0.1, "y": 0.1},
587//                     {"x": 0.1, "y": 0.3},
588//                     {"x": 0.3, "y": 0.3},
589//                     {"x": 0.3, "y": 0.1},
590//                   ]
591//                 },
592//               }
593//             ],
594//           }\n
595//           {
596//             "id": "2",
597//             "text_snippet": {
598//               "content": "An elaborate content",
599//               "mime_type": "text/plain"
600//             }
601//           }
602//         Sample document JSON Lines file (presented here with artificial line
603//         breaks, but the only actual line break is denoted by \n).:
604//           {
605//             "document": {
606//               "input_config": {
607//                 "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
608//                 }
609//               }
610//             }
611//           }\n
612//           {
613//             "document": {
614//               "input_config": {
615//                 "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ]
616//                 }
617//               }
618//             }
619//           }
620//
621//  *  For Tables:
622//         Either
623//         [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or
624//
625// [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source].
626//         GCS case:
627//           CSV file(s), each by itself 10GB or smaller and total size must be
628//           100GB or smaller, where first file must have a header containing
629//           column names. If the first row of a subsequent file is the same as
630//           the header, then it is also treated as a header. All other rows
631//           contain values for the corresponding columns.
632//           The column names must contain the model's
633//
634// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
635//
636// [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
637//           (order doesn't matter). The columns corresponding to the model's
638//           input feature column specs must contain values compatible with the
639//           column spec's data types. Prediction on all the rows, i.e. the CSV
640//           lines, will be attempted. For FORECASTING
641//
642// [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
643//           all columns having
644//
645// [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
646//           type will be ignored.
647//           First three sample rows of a CSV file:
648//             "First Name","Last Name","Dob","Addresses"
649//
650// "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
651//
652// "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]}
653//         BigQuery case:
654//           An URI of a BigQuery table. The user data size of the BigQuery
655//           table must be 100GB or smaller.
656//           The column names must contain the model's
657//
658// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
659//
660// [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
661//           (order doesn't matter). The columns corresponding to the model's
662//           input feature column specs must contain values compatible with the
663//           column spec's data types. Prediction on all the rows of the table
664//           will be attempted. For FORECASTING
665//
666// [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
667//           all columns having
668//
669// [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
670//           type will be ignored.
671//
672//  Definitions:
673//  GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/video.avi".
674//  TEXT_SNIPPET = A content of a text snippet, UTF-8 encoded, enclosed within
675//                 double quotes ("")
676//  TIME_SEGMENT_START = TIME_OFFSET
677//                       Expresses a beginning, inclusive, of a time segment
678//                       within an
679//                       example that has a time dimension (e.g. video).
680//  TIME_SEGMENT_END = TIME_OFFSET
681//                     Expresses an end, exclusive, of a time segment within
682//                     an example that has a time dimension (e.g. video).
683//  TIME_OFFSET = A number of seconds as measured from the start of an
684//                example (e.g. video). Fractions are allowed, up to a
685//                microsecond precision. "inf" is allowed and it means the end
686//                of the example.
687//
688//  Errors:
689//  If any of the provided CSV files can't be parsed or if more than certain
690//  percent of CSV rows cannot be processed then the operation fails and
691//  prediction does not happen. Regardless of overall success or failure the
692//  per-row failures, up to a certain count cap, will be listed in
693//  Operation.metadata.partial_failures.
type BatchPredictInputConfig struct {
	// Required. The source of the input.
	//
	// Types that are valid to be assigned to Source:
	//	*BatchPredictInputConfig_GcsSource
	//	*BatchPredictInputConfig_BigquerySource
	Source               isBatchPredictInputConfig_Source `protobuf_oneof:"source"`
	// XXX_* fields are internal bookkeeping for the protobuf runtime
	// (unknown-field retention and cached size); do not access directly.
	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
	XXX_unrecognized     []byte                           `json:"-"`
	XXX_sizecache        int32                            `json:"-"`
}
705
// Reset clears m to an empty BatchPredictInputConfig.
func (m *BatchPredictInputConfig) Reset()         { *m = BatchPredictInputConfig{} }

// String returns the compact text-format representation of m.
func (m *BatchPredictInputConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*BatchPredictInputConfig) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes for io.proto and
// the index path of this message within it.
func (*BatchPredictInputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{1}
}
712
// XXX_Unmarshal delegates wire-format decoding of b into m to the shared
// message-info implementation.
func (m *BatchPredictInputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_BatchPredictInputConfig.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *BatchPredictInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_BatchPredictInputConfig.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *BatchPredictInputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_BatchPredictInputConfig.Merge(m, src)
}

// XXX_Size reports the size of the encoded message in bytes.
func (m *BatchPredictInputConfig) XXX_Size() int {
	return xxx_messageInfo_BatchPredictInputConfig.Size(m)
}

// XXX_DiscardUnknown drops any retained unknown fields from m.
func (m *BatchPredictInputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_BatchPredictInputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_BatchPredictInputConfig caches marshaling metadata for
// BatchPredictInputConfig.
var xxx_messageInfo_BatchPredictInputConfig proto.InternalMessageInfo
730
// isBatchPredictInputConfig_Source is the sealed interface implemented by
// every wrapper type assignable to the Source oneof.
type isBatchPredictInputConfig_Source interface {
	isBatchPredictInputConfig_Source()
}

// BatchPredictInputConfig_GcsSource holds the gcs_source oneof variant.
type BatchPredictInputConfig_GcsSource struct {
	GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"`
}

// BatchPredictInputConfig_BigquerySource holds the bigquery_source oneof variant.
type BatchPredictInputConfig_BigquerySource struct {
	BigquerySource *BigQuerySource `protobuf:"bytes,2,opt,name=bigquery_source,json=bigquerySource,proto3,oneof"`
}

func (*BatchPredictInputConfig_GcsSource) isBatchPredictInputConfig_Source() {}

func (*BatchPredictInputConfig_BigquerySource) isBatchPredictInputConfig_Source() {}
746
747func (m *BatchPredictInputConfig) GetSource() isBatchPredictInputConfig_Source {
748	if m != nil {
749		return m.Source
750	}
751	return nil
752}
753
754func (m *BatchPredictInputConfig) GetGcsSource() *GcsSource {
755	if x, ok := m.GetSource().(*BatchPredictInputConfig_GcsSource); ok {
756		return x.GcsSource
757	}
758	return nil
759}
760
761func (m *BatchPredictInputConfig) GetBigquerySource() *BigQuerySource {
762	if x, ok := m.GetSource().(*BatchPredictInputConfig_BigquerySource); ok {
763		return x.BigquerySource
764	}
765	return nil
766}
767
768// XXX_OneofWrappers is for the internal use of the proto package.
769func (*BatchPredictInputConfig) XXX_OneofWrappers() []interface{} {
770	return []interface{}{
771		(*BatchPredictInputConfig_GcsSource)(nil),
772		(*BatchPredictInputConfig_BigquerySource)(nil),
773	}
774}
775
776// Input configuration of a [Document][google.cloud.automl.v1beta1.Document].
type DocumentInputConfig struct {
	// The Google Cloud Storage location of the document file. Only a single path
	// should be given.
	// Max supported size: 512MB.
	// Supported extensions: .PDF.
	GcsSource            *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"`
	// XXX_* fields are internal protobuf runtime state; do not access directly.
	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
	XXX_unrecognized     []byte     `json:"-"`
	XXX_sizecache        int32      `json:"-"`
}
787
// Reset clears m to an empty DocumentInputConfig.
func (m *DocumentInputConfig) Reset()         { *m = DocumentInputConfig{} }

// String returns the compact text-format representation of m.
func (m *DocumentInputConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*DocumentInputConfig) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes for io.proto and
// the index path of this message within it.
func (*DocumentInputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{2}
}
794
// XXX_Unmarshal delegates wire-format decoding of b into m to the shared
// message-info implementation.
func (m *DocumentInputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DocumentInputConfig.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *DocumentInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DocumentInputConfig.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *DocumentInputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DocumentInputConfig.Merge(m, src)
}

// XXX_Size reports the size of the encoded message in bytes.
func (m *DocumentInputConfig) XXX_Size() int {
	return xxx_messageInfo_DocumentInputConfig.Size(m)
}

// XXX_DiscardUnknown drops any retained unknown fields from m.
func (m *DocumentInputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_DocumentInputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_DocumentInputConfig caches marshaling metadata for
// DocumentInputConfig.
var xxx_messageInfo_DocumentInputConfig proto.InternalMessageInfo
812
813func (m *DocumentInputConfig) GetGcsSource() *GcsSource {
814	if m != nil {
815		return m.GcsSource
816	}
817	return nil
818}
819
820// *  For Translation:
821//         CSV file `translation.csv`, with each line in format:
822//         ML_USE,GCS_FILE_PATH
823//         GCS_FILE_PATH leads to a .TSV file which describes examples that have
824//         given ML_USE, using the following row format per line:
825//         TEXT_SNIPPET (in source language) \t TEXT_SNIPPET (in target
826//         language)
827//
828//   *  For Tables:
829//         Output depends on whether the dataset was imported from GCS or
830//         BigQuery.
831//         GCS case:
832//
833// [gcs_destination][google.cloud.automl.v1beta1.OutputConfig.gcs_destination]
834//           must be set. Exported are CSV file(s) `tables_1.csv`,
835//           `tables_2.csv`,...,`tables_N.csv` with each having as header line
836//           the table's column names, and all other lines contain values for
837//           the header columns.
838//         BigQuery case:
839//
840// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination]
841//           pointing to a BigQuery project must be set. In the given project a
842//           new dataset will be created with name
843//
844// `export_data_<automl-dataset-display-name>_<timestamp-of-export-call>`
845//           where <automl-dataset-display-name> will be made
846//           BigQuery-dataset-name compatible (e.g. most special characters will
847//           become underscores), and timestamp will be in
848//           YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In that
849//           dataset a new table called `primary_table` will be created, and
850//           filled with precisely the same data as this obtained on import.
type OutputConfig struct {
	// Required. The destination of the output.
	//
	// Types that are valid to be assigned to Destination:
	//	*OutputConfig_GcsDestination
	//	*OutputConfig_BigqueryDestination
	Destination          isOutputConfig_Destination `protobuf_oneof:"destination"`
	// XXX_* fields are internal bookkeeping for the protobuf runtime
	// (unknown-field retention and cached size); do not access directly.
	XXX_NoUnkeyedLiteral struct{}                   `json:"-"`
	XXX_unrecognized     []byte                     `json:"-"`
	XXX_sizecache        int32                      `json:"-"`
}
862
// Reset clears m to an empty OutputConfig.
func (m *OutputConfig) Reset()         { *m = OutputConfig{} }

// String returns the compact text-format representation of m.
func (m *OutputConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*OutputConfig) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes for io.proto and
// the index path of this message within it.
func (*OutputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{3}
}
869
// XXX_Unmarshal delegates wire-format decoding of b into m to the shared
// message-info implementation.
func (m *OutputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_OutputConfig.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *OutputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_OutputConfig.Merge(m, src)
}

// XXX_Size reports the size of the encoded message in bytes.
func (m *OutputConfig) XXX_Size() int {
	return xxx_messageInfo_OutputConfig.Size(m)
}

// XXX_DiscardUnknown drops any retained unknown fields from m.
func (m *OutputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_OutputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_OutputConfig caches marshaling metadata for OutputConfig.
var xxx_messageInfo_OutputConfig proto.InternalMessageInfo
887
// isOutputConfig_Destination is the sealed interface implemented by every
// wrapper type assignable to the Destination oneof.
type isOutputConfig_Destination interface {
	isOutputConfig_Destination()
}

// OutputConfig_GcsDestination holds the gcs_destination oneof variant.
type OutputConfig_GcsDestination struct {
	GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"`
}

// OutputConfig_BigqueryDestination holds the bigquery_destination oneof variant.
type OutputConfig_BigqueryDestination struct {
	BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"`
}

func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {}

func (*OutputConfig_BigqueryDestination) isOutputConfig_Destination() {}
903
904func (m *OutputConfig) GetDestination() isOutputConfig_Destination {
905	if m != nil {
906		return m.Destination
907	}
908	return nil
909}
910
911func (m *OutputConfig) GetGcsDestination() *GcsDestination {
912	if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok {
913		return x.GcsDestination
914	}
915	return nil
916}
917
918func (m *OutputConfig) GetBigqueryDestination() *BigQueryDestination {
919	if x, ok := m.GetDestination().(*OutputConfig_BigqueryDestination); ok {
920		return x.BigqueryDestination
921	}
922	return nil
923}
924
925// XXX_OneofWrappers is for the internal use of the proto package.
926func (*OutputConfig) XXX_OneofWrappers() []interface{} {
927	return []interface{}{
928		(*OutputConfig_GcsDestination)(nil),
929		(*OutputConfig_BigqueryDestination)(nil),
930	}
931}
932
933// Output configuration for BatchPredict Action.
934//
935// As destination the
936//
937// [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination]
938// must be set unless specified otherwise for a domain. If gcs_destination is
939// set then in the given directory a new directory is created. Its name
940// will be
941// "prediction-<model-display-name>-<timestamp-of-prediction-call>",
942// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. The contents
943// of it depends on the ML problem the predictions are made for.
944//
945//  *  For Image Classification:
946//         In the created directory files `image_classification_1.jsonl`,
947//         `image_classification_2.jsonl`,...,`image_classification_N.jsonl`
948//         will be created, where N may be 1, and depends on the
949//         total number of the successfully predicted images and annotations.
950//         A single image will be listed only once with all its annotations,
951//         and its annotations will never be split across files.
952//         Each .JSONL file will contain, per line, a JSON representation of a
953//         proto that wraps image's "ID" : "<id_value>" followed by a list of
954//         zero or more AnnotationPayload protos (called annotations), which
955//         have classification detail populated.
956//         If prediction for any image failed (partially or completely), then an
957//         additional `errors_1.jsonl`, `errors_2.jsonl`,..., `errors_N.jsonl`
958//         files will be created (N depends on total number of failed
959//         predictions). These files will have a JSON representation of a proto
960//         that wraps the same "ID" : "<id_value>" but here followed by
961//         exactly one
962//
963// [`google.rpc.Status`](https:
964// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
//         containing only `code` and `message` fields.
966//
967//  *  For Image Object Detection:
968//         In the created directory files `image_object_detection_1.jsonl`,
969//         `image_object_detection_2.jsonl`,...,`image_object_detection_N.jsonl`
970//         will be created, where N may be 1, and depends on the
971//         total number of the successfully predicted images and annotations.
972//         Each .JSONL file will contain, per line, a JSON representation of a
973//         proto that wraps image's "ID" : "<id_value>" followed by a list of
974//         zero or more AnnotationPayload protos (called annotations), which
975//         have image_object_detection detail populated. A single image will
976//         be listed only once with all its annotations, and its annotations
977//         will never be split across files.
978//         If prediction for any image failed (partially or completely), then
979//         additional `errors_1.jsonl`, `errors_2.jsonl`,..., `errors_N.jsonl`
980//         files will be created (N depends on total number of failed
981//         predictions). These files will have a JSON representation of a proto
982//         that wraps the same "ID" : "<id_value>" but here followed by
983//         exactly one
984//
985// [`google.rpc.Status`](https:
986// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
//         containing only `code` and `message` fields.
988//  *  For Video Classification:
989//         In the created directory a video_classification.csv file, and a .JSON
990//         file per each video classification requested in the input (i.e. each
991//         line in given CSV(s)), will be created.
992//
993//         The format of video_classification.csv is:
994//
995// GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS
996//         where:
997//         GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 to 1
998//             the prediction input lines (i.e. video_classification.csv has
999//             precisely the same number of lines as the prediction input had.)
1000//         JSON_FILE_NAME = Name of .JSON file in the output directory, which
1001//             contains prediction responses for the video time segment.
1002//         STATUS = "OK" if prediction completed successfully, or an error code
1003//             with message otherwise. If STATUS is not "OK" then the .JSON file
1004//             for that line may not exist or be empty.
1005//
1006//         Each .JSON file, assuming STATUS is "OK", will contain a list of
1007//         AnnotationPayload protos in JSON format, which are the predictions
1008//         for the video time segment the file is assigned to in the
1009//         video_classification.csv. All AnnotationPayload protos will have
1010//         video_classification field set, and will be sorted by
1011//         video_classification.type field (note that the returned types are
1012//         governed by `classifaction_types` parameter in
1013//         [PredictService.BatchPredictRequest.params][]).
1014//
1015//  *  For Video Object Tracking:
1016//         In the created directory a video_object_tracking.csv file will be
//         created, and multiple files video_object_tracking_1.json,
//         video_object_tracking_2.json,..., video_object_tracking_N.json,
1019//         where N is the number of requests in the input (i.e. the number of
1020//         lines in given CSV(s)).
1021//
1022//         The format of video_object_tracking.csv is:
1023//
1024// GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS
1025//         where:
1026//         GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 to 1
1027//             the prediction input lines (i.e. video_object_tracking.csv has
1028//             precisely the same number of lines as the prediction input had.)
1029//         JSON_FILE_NAME = Name of .JSON file in the output directory, which
1030//             contains prediction responses for the video time segment.
1031//         STATUS = "OK" if prediction completed successfully, or an error
1032//             code with message otherwise. If STATUS is not "OK" then the .JSON
1033//             file for that line may not exist or be empty.
1034//
1035//         Each .JSON file, assuming STATUS is "OK", will contain a list of
1036//         AnnotationPayload protos in JSON format, which are the predictions
1037//         for each frame of the video time segment the file is assigned to in
1038//         video_object_tracking.csv. All AnnotationPayload protos will have
1039//         video_object_tracking field set.
1040//  *  For Text Classification:
1041//         In the created directory files `text_classification_1.jsonl`,
1042//         `text_classification_2.jsonl`,...,`text_classification_N.jsonl`
1043//         will be created, where N may be 1, and depends on the
1044//         total number of inputs and annotations found.
1045//
1046//         Each .JSONL file will contain, per line, a JSON representation of a
1047//         proto that wraps input text snippet or input text file and a list of
1048//         zero or more AnnotationPayload protos (called annotations), which
1049//         have classification detail populated. A single text snippet or file
1050//         will be listed only once with all its annotations, and its
1051//         annotations will never be split across files.
1052//
1053//         If prediction for any text snippet or file failed (partially or
1054//         completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,...,
1055//         `errors_N.jsonl` files will be created (N depends on total number of
1056//         failed predictions). These files will have a JSON representation of a
1057//         proto that wraps input text snippet or input text file followed by
1058//         exactly one
1059//
1060// [`google.rpc.Status`](https:
1061// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
1062//         containing only `code` and `message`.
1063//
1064//  *  For Text Sentiment:
1065//         In the created directory files `text_sentiment_1.jsonl`,
1066//         `text_sentiment_2.jsonl`,...,`text_sentiment_N.jsonl`
1067//         will be created, where N may be 1, and depends on the
1068//         total number of inputs and annotations found.
1069//
1070//         Each .JSONL file will contain, per line, a JSON representation of a
1071//         proto that wraps input text snippet or input text file and a list of
1072//         zero or more AnnotationPayload protos (called annotations), which
1073//         have text_sentiment detail populated. A single text snippet or file
1074//         will be listed only once with all its annotations, and its
1075//         annotations will never be split across files.
1076//
1077//         If prediction for any text snippet or file failed (partially or
1078//         completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,...,
1079//         `errors_N.jsonl` files will be created (N depends on total number of
1080//         failed predictions). These files will have a JSON representation of a
1081//         proto that wraps input text snippet or input text file followed by
1082//         exactly one
1083//
1084// [`google.rpc.Status`](https:
1085// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
1086//         containing only `code` and `message`.
1087//
1088//   *  For Text Extraction:
1089//         In the created directory files `text_extraction_1.jsonl`,
1090//         `text_extraction_2.jsonl`,...,`text_extraction_N.jsonl`
1091//         will be created, where N may be 1, and depends on the
1092//         total number of inputs and annotations found.
1093//         The contents of these .JSONL file(s) depend on whether the input
1094//         used inline text, or documents.
1095//         If input was inline, then each .JSONL file will contain, per line,
1096//           a JSON representation of a proto that wraps given in request text
1097//           snippet's "id" (if specified), followed by input text snippet,
1098//           and a list of zero or more
1099//           AnnotationPayload protos (called annotations), which have
1100//           text_extraction detail populated. A single text snippet will be
1101//           listed only once with all its annotations, and its annotations will
1102//           never be split across files.
1103//         If input used documents, then each .JSONL file will contain, per
1104//           line, a JSON representation of a proto that wraps given in request
1105//           document proto, followed by its OCR-ed representation in the form
1106//           of a text snippet, finally followed by a list of zero or more
1107//           AnnotationPayload protos (called annotations), which have
1108//           text_extraction detail populated and refer, via their indices, to
1109//           the OCR-ed text snippet. A single document (and its text snippet)
1110//           will be listed only once with all its annotations, and its
1111//           annotations will never be split across files.
1112//         If prediction for any text snippet failed (partially or completely),
1113//         then additional `errors_1.jsonl`, `errors_2.jsonl`,...,
1114//         `errors_N.jsonl` files will be created (N depends on total number of
1115//         failed predictions). These files will have a JSON representation of a
1116//         proto that wraps either the "id" : "<id_value>" (in case of inline)
1117//         or the document proto (in case of document) but here followed by
1118//         exactly one
1119//
1120// [`google.rpc.Status`](https:
1121// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
1122//         containing only `code` and `message`.
1123//
1124//  *  For Tables:
1125//         Output depends on whether
1126//
1127// [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination]
1128//         or
1129//
1130// [bigquery_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.bigquery_destination]
1131//         is set (either is allowed).
1132//         GCS case:
1133//           In the created directory files `tables_1.csv`, `tables_2.csv`,...,
1134//           `tables_N.csv` will be created, where N may be 1, and depends on
1135//           the total number of the successfully predicted rows.
1136//           For all CLASSIFICATION
1137//
1138// [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
1139//             Each .csv file will contain a header, listing all columns'
1140//
1141// [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
1142//             given on input followed by M target column names in the format of
1143//
1144// "<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]
1145//
1146// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>_<target
1147//             value>_score" where M is the number of distinct target values,
1148//             i.e. number of distinct values in the target column of the table
1149//             used to train the model. Subsequent lines will contain the
1150//             respective values of successfully predicted rows, with the last,
1151//             i.e. the target, columns having the corresponding prediction
1152//             [scores][google.cloud.automl.v1beta1.TablesAnnotation.score].
1153//           For REGRESSION and FORECASTING
1154//
1155// [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
1156//             Each .csv file will contain a header, listing all columns'
1157//             [display_name-s][google.cloud.automl.v1beta1.display_name] given
1158//             on input followed by the predicted target column with name in the
1159//             format of
1160//
1161// "predicted_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]
1162//
1163// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>"
1164//             Subsequent lines will contain the respective values of
1165//             successfully predicted rows, with the last, i.e. the target,
1166//             column having the predicted target value.
1167//             If prediction for any rows failed, then an additional
1168//             `errors_1.csv`, `errors_2.csv`,..., `errors_N.csv` will be
1169//             created (N depends on total number of failed rows). These files
1170//             will have analogous format as `tables_*.csv`, but always with a
1171//             single target column having
1172//
1173// [`google.rpc.Status`](https:
1174// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
1175//             represented as a JSON string, and containing only `code` and
1176//             `message`.
1177//         BigQuery case:
1178//
1179// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination]
1180//           pointing to a BigQuery project must be set. In the given project a
1181//           new dataset will be created with name
1182//           `prediction_<model-display-name>_<timestamp-of-prediction-call>`
1183//           where <model-display-name> will be made
1184//           BigQuery-dataset-name compatible (e.g. most special characters will
1185//           become underscores), and timestamp will be in
1186//           YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset
1187//           two tables will be created, `predictions`, and `errors`.
1188//           The `predictions` table's column names will be the input columns'
1189//
1190// [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
1191//           followed by the target column with name in the format of
1192//
1193// "predicted_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]
1194//
1195// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>"
1196//           The input feature columns will contain the respective values of
1197//           successfully predicted rows, with the target column having an
1198//           ARRAY of
1199//
1200// [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload],
1201//           represented as STRUCT-s, containing
1202//           [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation].
1203//           The `errors` table contains rows for which the prediction has
1204//           failed, it has analogous input columns while the target column name
1205//           is in the format of
1206//
1207// "errors_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]
1208//
1209// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>",
1210//           and as a value has
1211//
1212// [`google.rpc.Status`](https:
1213// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto)
1214//           represented as a STRUCT, and containing only `code` and `message`.
type BatchPredictOutputConfig struct {
	// Required. The destination of the output.
	//
	// Types that are valid to be assigned to Destination:
	//	*BatchPredictOutputConfig_GcsDestination
	//	*BatchPredictOutputConfig_BigqueryDestination
	Destination          isBatchPredictOutputConfig_Destination `protobuf_oneof:"destination"`
	// XXX_* fields are internal bookkeeping for the protobuf runtime
	// (unknown-field retention and cached size); do not access directly.
	XXX_NoUnkeyedLiteral struct{}                               `json:"-"`
	XXX_unrecognized     []byte                                 `json:"-"`
	XXX_sizecache        int32                                  `json:"-"`
}
1226
// Reset clears m to an empty BatchPredictOutputConfig.
func (m *BatchPredictOutputConfig) Reset()         { *m = BatchPredictOutputConfig{} }

// String returns the compact text-format representation of m.
func (m *BatchPredictOutputConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*BatchPredictOutputConfig) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes for io.proto and
// the index path of this message within it.
func (*BatchPredictOutputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{4}
}
1233
// XXX_Unmarshal delegates wire-format decoding of b into m to the shared
// message-info implementation.
func (m *BatchPredictOutputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_BatchPredictOutputConfig.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *BatchPredictOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_BatchPredictOutputConfig.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *BatchPredictOutputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_BatchPredictOutputConfig.Merge(m, src)
}

// XXX_Size reports the size of the encoded message in bytes.
func (m *BatchPredictOutputConfig) XXX_Size() int {
	return xxx_messageInfo_BatchPredictOutputConfig.Size(m)
}

// XXX_DiscardUnknown drops any retained unknown fields from m.
func (m *BatchPredictOutputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_BatchPredictOutputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_BatchPredictOutputConfig caches marshaling metadata for
// BatchPredictOutputConfig.
var xxx_messageInfo_BatchPredictOutputConfig proto.InternalMessageInfo
1251
// isBatchPredictOutputConfig_Destination is the sealed interface implemented
// by every wrapper type assignable to the Destination oneof.
type isBatchPredictOutputConfig_Destination interface {
	isBatchPredictOutputConfig_Destination()
}

// BatchPredictOutputConfig_GcsDestination holds the gcs_destination oneof variant.
type BatchPredictOutputConfig_GcsDestination struct {
	GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"`
}

// BatchPredictOutputConfig_BigqueryDestination holds the bigquery_destination
// oneof variant.
type BatchPredictOutputConfig_BigqueryDestination struct {
	BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"`
}

func (*BatchPredictOutputConfig_GcsDestination) isBatchPredictOutputConfig_Destination() {}

func (*BatchPredictOutputConfig_BigqueryDestination) isBatchPredictOutputConfig_Destination() {}
1267
1268func (m *BatchPredictOutputConfig) GetDestination() isBatchPredictOutputConfig_Destination {
1269	if m != nil {
1270		return m.Destination
1271	}
1272	return nil
1273}
1274
1275func (m *BatchPredictOutputConfig) GetGcsDestination() *GcsDestination {
1276	if x, ok := m.GetDestination().(*BatchPredictOutputConfig_GcsDestination); ok {
1277		return x.GcsDestination
1278	}
1279	return nil
1280}
1281
1282func (m *BatchPredictOutputConfig) GetBigqueryDestination() *BigQueryDestination {
1283	if x, ok := m.GetDestination().(*BatchPredictOutputConfig_BigqueryDestination); ok {
1284		return x.BigqueryDestination
1285	}
1286	return nil
1287}
1288
1289// XXX_OneofWrappers is for the internal use of the proto package.
1290func (*BatchPredictOutputConfig) XXX_OneofWrappers() []interface{} {
1291	return []interface{}{
1292		(*BatchPredictOutputConfig_GcsDestination)(nil),
1293		(*BatchPredictOutputConfig_BigqueryDestination)(nil),
1294	}
1295}
1296
// Output configuration for ModelExport Action.
type ModelExportOutputConfig struct {
	// Required. The destination of the output.
	//
	// Types that are valid to be assigned to Destination:
	//	*ModelExportOutputConfig_GcsDestination
	//	*ModelExportOutputConfig_GcrDestination
	Destination isModelExportOutputConfig_Destination `protobuf_oneof:"destination"`
	// The format in which the model must be exported. The available, and default,
	// formats depend on the problem and model type (if given problem and type
	// combination doesn't have a format listed, it means its models are not
	// exportable):
	//
	// *  For Image Classification mobile-low-latency-1, mobile-versatile-1,
	//        mobile-high-accuracy-1:
	//      "tflite" (default), "edgetpu_tflite", "tf_saved_model", "tf_js",
	//      "docker".
	//
	// *  For Image Classification mobile-core-ml-low-latency-1,
	//        mobile-core-ml-versatile-1, mobile-core-ml-high-accuracy-1:
	//      "core_ml" (default).
	// Formats description:
	//
	// * tflite - Used for Android mobile devices.
	// * edgetpu_tflite - Used for [Edge TPU](https://cloud.google.com/edge-tpu/)
	//                    devices.
	// * tf_saved_model - A tensorflow model in SavedModel format.
	// * tf_js - A [TensorFlow.js](https://www.tensorflow.org/js) model that can
	//           be used in the browser and in Node.js using JavaScript.
	// * docker - Used for Docker containers. Use the params field to customize
	//            the container. The container is verified to work correctly on
	//            ubuntu 16.04 operating system. See more at
	//            [containers
	//            quickstart](https://cloud.google.com/vision/automl/docs/containers-gcs-quickstart)
	// * core_ml - Used for iOS mobile devices.
	ModelFormat string `protobuf:"bytes,4,opt,name=model_format,json=modelFormat,proto3" json:"model_format,omitempty"`
	// Additional model-type and format specific parameters describing the
	// requirements for the to be exported model files, any string must be up to
	// 25000 characters long.
	//
	//  * For `docker` format:
	//     `cpu_architecture` - (string) "x86_64" (default).
	//     `gpu_architecture` - (string) "none" (default), "nvidia".
	Params               map[string]string `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
	XXX_unrecognized     []byte            `json:"-"`
	XXX_sizecache        int32             `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *ModelExportOutputConfig) Reset()         { *m = ModelExportOutputConfig{} }
func (m *ModelExportOutputConfig) String() string { return proto.CompactTextString(m) }
func (*ModelExportOutputConfig) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*ModelExportOutputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{5}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *ModelExportOutputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ModelExportOutputConfig.Unmarshal(m, b)
}
func (m *ModelExportOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ModelExportOutputConfig.Marshal(b, m, deterministic)
}
func (m *ModelExportOutputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ModelExportOutputConfig.Merge(m, src)
}
func (m *ModelExportOutputConfig) XXX_Size() int {
	return xxx_messageInfo_ModelExportOutputConfig.Size(m)
}
func (m *ModelExportOutputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_ModelExportOutputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_ModelExportOutputConfig caches the reflection state used
// by the XXX_* methods above.
var xxx_messageInfo_ModelExportOutputConfig proto.InternalMessageInfo

// isModelExportOutputConfig_Destination is the sealed interface satisfied
// only by the wrapper types of the "destination" oneof.
type isModelExportOutputConfig_Destination interface {
	isModelExportOutputConfig_Destination()
}

// ModelExportOutputConfig_GcsDestination wraps a GcsDestination for the
// "destination" oneof.
type ModelExportOutputConfig_GcsDestination struct {
	GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"`
}

// ModelExportOutputConfig_GcrDestination wraps a GcrDestination for the
// "destination" oneof.
type ModelExportOutputConfig_GcrDestination struct {
	GcrDestination *GcrDestination `protobuf:"bytes,3,opt,name=gcr_destination,json=gcrDestination,proto3,oneof"`
}

func (*ModelExportOutputConfig_GcsDestination) isModelExportOutputConfig_Destination() {}

func (*ModelExportOutputConfig_GcrDestination) isModelExportOutputConfig_Destination() {}

// GetDestination returns the oneof wrapper currently set, or nil if m is nil
// or no destination has been assigned.
func (m *ModelExportOutputConfig) GetDestination() isModelExportOutputConfig_Destination {
	if m != nil {
		return m.Destination
	}
	return nil
}
1395
1396func (m *ModelExportOutputConfig) GetGcsDestination() *GcsDestination {
1397	if x, ok := m.GetDestination().(*ModelExportOutputConfig_GcsDestination); ok {
1398		return x.GcsDestination
1399	}
1400	return nil
1401}
1402
1403func (m *ModelExportOutputConfig) GetGcrDestination() *GcrDestination {
1404	if x, ok := m.GetDestination().(*ModelExportOutputConfig_GcrDestination); ok {
1405		return x.GcrDestination
1406	}
1407	return nil
1408}
1409
1410func (m *ModelExportOutputConfig) GetModelFormat() string {
1411	if m != nil {
1412		return m.ModelFormat
1413	}
1414	return ""
1415}
1416
1417func (m *ModelExportOutputConfig) GetParams() map[string]string {
1418	if m != nil {
1419		return m.Params
1420	}
1421	return nil
1422}
1423
1424// XXX_OneofWrappers is for the internal use of the proto package.
1425func (*ModelExportOutputConfig) XXX_OneofWrappers() []interface{} {
1426	return []interface{}{
1427		(*ModelExportOutputConfig_GcsDestination)(nil),
1428		(*ModelExportOutputConfig_GcrDestination)(nil),
1429	}
1430}
1431
// Output configuration for ExportEvaluatedExamples Action. Note that this call
// is available only for 30 days since the moment the model was evaluated.
// The output depends on the domain, as follows (note that only examples from
// the TEST set are exported):
//
//  *  For Tables:
//
// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination]
//       pointing to a BigQuery project must be set. In the given project a
//       new dataset will be created with name
//
// `export_evaluated_examples_<model-display-name>_<timestamp-of-export-call>`
//       where <model-display-name> will be made BigQuery-dataset-name
//       compatible (e.g. most special characters will become underscores),
//       and timestamp will be in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601"
//       format. In the dataset an `evaluated_examples` table will be
//       created. It will have all the same columns as the
//
// [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_spec_id]
//       of the
//       [dataset][google.cloud.automl.v1beta1.Model.dataset_id] from which
//       the model was created, as they were at the moment of model's
//       evaluation (this includes the target column with its ground
//       truth), followed by a column called "predicted_<target_column>". That
//       last column will contain the model's prediction result for each
//       respective row, given as ARRAY of
//       [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload],
//       represented as STRUCT-s, containing
//       [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation].
type ExportEvaluatedExamplesOutputConfig struct {
	// Required. The destination of the output.
	//
	// Types that are valid to be assigned to Destination:
	//	*ExportEvaluatedExamplesOutputConfig_BigqueryDestination
	Destination          isExportEvaluatedExamplesOutputConfig_Destination `protobuf_oneof:"destination"`
	XXX_NoUnkeyedLiteral struct{}                                          `json:"-"`
	XXX_unrecognized     []byte                                            `json:"-"`
	XXX_sizecache        int32                                             `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *ExportEvaluatedExamplesOutputConfig) Reset()         { *m = ExportEvaluatedExamplesOutputConfig{} }
func (m *ExportEvaluatedExamplesOutputConfig) String() string { return proto.CompactTextString(m) }
func (*ExportEvaluatedExamplesOutputConfig) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*ExportEvaluatedExamplesOutputConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{6}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *ExportEvaluatedExamplesOutputConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Unmarshal(m, b)
}
func (m *ExportEvaluatedExamplesOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Marshal(b, m, deterministic)
}
func (m *ExportEvaluatedExamplesOutputConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Merge(m, src)
}
func (m *ExportEvaluatedExamplesOutputConfig) XXX_Size() int {
	return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Size(m)
}
func (m *ExportEvaluatedExamplesOutputConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.DiscardUnknown(m)
}

// xxx_messageInfo_ExportEvaluatedExamplesOutputConfig caches the reflection
// state used by the XXX_* methods above.
var xxx_messageInfo_ExportEvaluatedExamplesOutputConfig proto.InternalMessageInfo

// isExportEvaluatedExamplesOutputConfig_Destination is the sealed interface
// satisfied only by the wrapper types of the "destination" oneof.
type isExportEvaluatedExamplesOutputConfig_Destination interface {
	isExportEvaluatedExamplesOutputConfig_Destination()
}

// ExportEvaluatedExamplesOutputConfig_BigqueryDestination wraps a
// BigQueryDestination for the "destination" oneof.
type ExportEvaluatedExamplesOutputConfig_BigqueryDestination struct {
	BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"`
}

func (*ExportEvaluatedExamplesOutputConfig_BigqueryDestination) isExportEvaluatedExamplesOutputConfig_Destination() {
}

// GetDestination returns the oneof wrapper currently set, or nil if m is nil
// or no destination has been assigned.
func (m *ExportEvaluatedExamplesOutputConfig) GetDestination() isExportEvaluatedExamplesOutputConfig_Destination {
	if m != nil {
		return m.Destination
	}
	return nil
}
1514
1515func (m *ExportEvaluatedExamplesOutputConfig) GetBigqueryDestination() *BigQueryDestination {
1516	if x, ok := m.GetDestination().(*ExportEvaluatedExamplesOutputConfig_BigqueryDestination); ok {
1517		return x.BigqueryDestination
1518	}
1519	return nil
1520}
1521
1522// XXX_OneofWrappers is for the internal use of the proto package.
1523func (*ExportEvaluatedExamplesOutputConfig) XXX_OneofWrappers() []interface{} {
1524	return []interface{}{
1525		(*ExportEvaluatedExamplesOutputConfig_BigqueryDestination)(nil),
1526	}
1527}
1528
// The Google Cloud Storage location for the input content.
type GcsSource struct {
	// Required. Google Cloud Storage URIs to input files, up to 2000 characters
	// long. Accepted forms:
	// * Full object path, e.g. gs://bucket/directory/object.csv
	InputUris            []string `protobuf:"bytes,1,rep,name=input_uris,json=inputUris,proto3" json:"input_uris,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *GcsSource) Reset()         { *m = GcsSource{} }
func (m *GcsSource) String() string { return proto.CompactTextString(m) }
func (*GcsSource) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*GcsSource) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{7}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *GcsSource) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GcsSource.Unmarshal(m, b)
}
func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic)
}
func (m *GcsSource) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GcsSource.Merge(m, src)
}
func (m *GcsSource) XXX_Size() int {
	return xxx_messageInfo_GcsSource.Size(m)
}
func (m *GcsSource) XXX_DiscardUnknown() {
	xxx_messageInfo_GcsSource.DiscardUnknown(m)
}

// xxx_messageInfo_GcsSource caches the reflection state used by the XXX_*
// methods above.
var xxx_messageInfo_GcsSource proto.InternalMessageInfo
1564
1565func (m *GcsSource) GetInputUris() []string {
1566	if m != nil {
1567		return m.InputUris
1568	}
1569	return nil
1570}
1571
// The BigQuery location for the input content.
type BigQuerySource struct {
	// Required. BigQuery URI to a table, up to 2000 characters long.
	// Accepted forms:
	// *  BigQuery path e.g. bq://projectId.bqDatasetId.bqTableId
	InputUri             string   `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *BigQuerySource) Reset()         { *m = BigQuerySource{} }
func (m *BigQuerySource) String() string { return proto.CompactTextString(m) }
func (*BigQuerySource) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*BigQuerySource) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{8}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *BigQuerySource) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_BigQuerySource.Unmarshal(m, b)
}
func (m *BigQuerySource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_BigQuerySource.Marshal(b, m, deterministic)
}
func (m *BigQuerySource) XXX_Merge(src proto.Message) {
	xxx_messageInfo_BigQuerySource.Merge(m, src)
}
func (m *BigQuerySource) XXX_Size() int {
	return xxx_messageInfo_BigQuerySource.Size(m)
}
func (m *BigQuerySource) XXX_DiscardUnknown() {
	xxx_messageInfo_BigQuerySource.DiscardUnknown(m)
}

// xxx_messageInfo_BigQuerySource caches the reflection state used by the
// XXX_* methods above.
var xxx_messageInfo_BigQuerySource proto.InternalMessageInfo
1607
1608func (m *BigQuerySource) GetInputUri() string {
1609	if m != nil {
1610		return m.InputUri
1611	}
1612	return ""
1613}
1614
// The Google Cloud Storage location where the output is to be written to.
type GcsDestination struct {
	// Required. Google Cloud Storage URI to output directory, up to 2000
	// characters long.
	// Accepted forms:
	// * Prefix path: gs://bucket/directory
	// The requesting user must have write permission to the bucket.
	// The directory is created if it doesn't exist.
	OutputUriPrefix      string   `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *GcsDestination) Reset()         { *m = GcsDestination{} }
func (m *GcsDestination) String() string { return proto.CompactTextString(m) }
func (*GcsDestination) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*GcsDestination) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{9}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *GcsDestination) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GcsDestination.Unmarshal(m, b)
}
func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic)
}
func (m *GcsDestination) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GcsDestination.Merge(m, src)
}
func (m *GcsDestination) XXX_Size() int {
	return xxx_messageInfo_GcsDestination.Size(m)
}
func (m *GcsDestination) XXX_DiscardUnknown() {
	xxx_messageInfo_GcsDestination.DiscardUnknown(m)
}

// xxx_messageInfo_GcsDestination caches the reflection state used by the
// XXX_* methods above.
var xxx_messageInfo_GcsDestination proto.InternalMessageInfo
1653
1654func (m *GcsDestination) GetOutputUriPrefix() string {
1655	if m != nil {
1656		return m.OutputUriPrefix
1657	}
1658	return ""
1659}
1660
// The BigQuery location for the output content.
type BigQueryDestination struct {
	// Required. BigQuery URI to a project, up to 2000 characters long.
	// Accepted forms:
	// *  BigQuery path e.g. bq://projectId
	OutputUri            string   `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *BigQueryDestination) Reset()         { *m = BigQueryDestination{} }
func (m *BigQueryDestination) String() string { return proto.CompactTextString(m) }
func (*BigQueryDestination) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*BigQueryDestination) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{10}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *BigQueryDestination) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_BigQueryDestination.Unmarshal(m, b)
}
func (m *BigQueryDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_BigQueryDestination.Marshal(b, m, deterministic)
}
func (m *BigQueryDestination) XXX_Merge(src proto.Message) {
	xxx_messageInfo_BigQueryDestination.Merge(m, src)
}
func (m *BigQueryDestination) XXX_Size() int {
	return xxx_messageInfo_BigQueryDestination.Size(m)
}
func (m *BigQueryDestination) XXX_DiscardUnknown() {
	xxx_messageInfo_BigQueryDestination.DiscardUnknown(m)
}

// xxx_messageInfo_BigQueryDestination caches the reflection state used by
// the XXX_* methods above.
var xxx_messageInfo_BigQueryDestination proto.InternalMessageInfo
1696
1697func (m *BigQueryDestination) GetOutputUri() string {
1698	if m != nil {
1699		return m.OutputUri
1700	}
1701	return ""
1702}
1703
// The GCR location where the image must be pushed to.
type GcrDestination struct {
	// Required. Google Container Registry URI of the new image, up to 2000
	// characters long. See
	// https://cloud.google.com/container-registry/docs/pushing-and-pulling#pushing_an_image_to_a_registry
	// Accepted forms:
	// * [HOSTNAME]/[PROJECT-ID]/[IMAGE]
	// * [HOSTNAME]/[PROJECT-ID]/[IMAGE]:[TAG]
	//
	// The requesting user must have permission to push images to the project.
	OutputUri            string   `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the proto.Message
// interface for this generated type.
func (m *GcrDestination) Reset()         { *m = GcrDestination{} }
func (m *GcrDestination) String() string { return proto.CompactTextString(m) }
func (*GcrDestination) ProtoMessage()    {}
// Descriptor returns the gzipped file descriptor and the index path of this
// message within it.
func (*GcrDestination) Descriptor() ([]byte, []int) {
	return fileDescriptor_6e2d768504aa30d7, []int{11}
}

// The XXX_* methods below delegate (un)marshaling, merging, sizing and
// unknown-field handling to the proto runtime; they are for internal use.
func (m *GcrDestination) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GcrDestination.Unmarshal(m, b)
}
func (m *GcrDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GcrDestination.Marshal(b, m, deterministic)
}
func (m *GcrDestination) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GcrDestination.Merge(m, src)
}
func (m *GcrDestination) XXX_Size() int {
	return xxx_messageInfo_GcrDestination.Size(m)
}
func (m *GcrDestination) XXX_DiscardUnknown() {
	xxx_messageInfo_GcrDestination.DiscardUnknown(m)
}

// xxx_messageInfo_GcrDestination caches the reflection state used by the
// XXX_* methods above.
var xxx_messageInfo_GcrDestination proto.InternalMessageInfo
1747
1748func (m *GcrDestination) GetOutputUri() string {
1749	if m != nil {
1750		return m.OutputUri
1751	}
1752	return ""
1753}
1754
// init registers every message type (and the two map-entry types) of this
// file with the proto runtime under its fully-qualified proto name.
func init() {
	proto.RegisterType((*InputConfig)(nil), "google.cloud.automl.v1beta1.InputConfig")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.InputConfig.ParamsEntry")
	proto.RegisterType((*BatchPredictInputConfig)(nil), "google.cloud.automl.v1beta1.BatchPredictInputConfig")
	proto.RegisterType((*DocumentInputConfig)(nil), "google.cloud.automl.v1beta1.DocumentInputConfig")
	proto.RegisterType((*OutputConfig)(nil), "google.cloud.automl.v1beta1.OutputConfig")
	proto.RegisterType((*BatchPredictOutputConfig)(nil), "google.cloud.automl.v1beta1.BatchPredictOutputConfig")
	proto.RegisterType((*ModelExportOutputConfig)(nil), "google.cloud.automl.v1beta1.ModelExportOutputConfig")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.ModelExportOutputConfig.ParamsEntry")
	proto.RegisterType((*ExportEvaluatedExamplesOutputConfig)(nil), "google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig")
	proto.RegisterType((*GcsSource)(nil), "google.cloud.automl.v1beta1.GcsSource")
	proto.RegisterType((*BigQuerySource)(nil), "google.cloud.automl.v1beta1.BigQuerySource")
	proto.RegisterType((*GcsDestination)(nil), "google.cloud.automl.v1beta1.GcsDestination")
	proto.RegisterType((*BigQueryDestination)(nil), "google.cloud.automl.v1beta1.BigQueryDestination")
	proto.RegisterType((*GcrDestination)(nil), "google.cloud.automl.v1beta1.GcrDestination")
}
1771
// init registers the raw (gzipped) file descriptor below under the proto
// file path, making it available for descriptor lookup.
func init() {
	proto.RegisterFile("google/cloud/automl/v1beta1/io.proto", fileDescriptor_6e2d768504aa30d7)
}
1775
// fileDescriptor_6e2d768504aa30d7 is the gzip-compressed FileDescriptorProto
// for google/cloud/automl/v1beta1/io.proto. It is registered in init above
// and returned by every Descriptor method in this file. Opaque generated
// data — do not edit by hand.
var fileDescriptor_6e2d768504aa30d7 = []byte{
	// 647 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x96, 0xdf, 0x4e, 0xd4, 0x4e,
	0x14, 0xc7, 0x7f, 0x6d, 0x7f, 0x12, 0x7a, 0x8a, 0xa0, 0x85, 0x84, 0x06, 0xfc, 0x83, 0xd5, 0x18,
	0x02, 0xb1, 0x15, 0xe4, 0x42, 0xab, 0x17, 0xb2, 0xb0, 0xa2, 0x09, 0xc4, 0x75, 0x0d, 0xc4, 0x98,
	0x4d, 0x36, 0x43, 0x77, 0x18, 0x27, 0xb6, 0x9d, 0x3a, 0x9d, 0x12, 0x78, 0x0b, 0xef, 0xbd, 0xf6,
	0x41, 0x4c, 0xbc, 0xf2, 0x19, 0x7c, 0x00, 0xe3, 0x53, 0x98, 0xce, 0x74, 0x97, 0x16, 0x71, 0x25,
	0x86, 0x60, 0xe2, 0xdd, 0xce, 0x39, 0xdf, 0xf3, 0x39, 0xfd, 0x9e, 0xce, 0x9e, 0x5d, 0xb8, 0x45,
	0x18, 0x23, 0x11, 0xf6, 0xc3, 0x88, 0xe5, 0x3d, 0x1f, 0xe5, 0x82, 0xc5, 0x91, 0xbf, 0xbf, 0xb4,
	0x8b, 0x05, 0x5a, 0xf2, 0x29, 0xf3, 0x52, 0xce, 0x04, 0xb3, 0x67, 0x95, 0xca, 0x93, 0x2a, 0x4f,
	0xa9, 0xbc, 0x52, 0x35, 0x73, 0xa5, 0x44, 0xa0, 0x94, 0xfa, 0x28, 0x49, 0x98, 0x40, 0x82, 0xb2,
	0x24, 0x53, 0xa5, 0xee, 0x27, 0x1d, 0xac, 0x67, 0x49, 0x9a, 0x8b, 0x35, 0x96, 0xec, 0x51, 0x62,
	0x6f, 0x00, 0x90, 0x30, 0xeb, 0x66, 0x2c, 0xe7, 0x21, 0x76, 0xb4, 0x39, 0x6d, 0xde, 0x5a, 0xbe,
	0xed, 0x0d, 0xe1, 0x7b, 0x1b, 0x61, 0xf6, 0x52, 0xaa, 0x9f, 0xfe, 0xd7, 0x36, 0x49, 0xff, 0x60,
	0xef, 0xc0, 0xc4, 0x2e, 0x25, 0xef, 0x72, 0xcc, 0x0f, 0xfb, 0x34, 0x43, 0xd2, 0x16, 0x87, 0xd2,
	0x1a, 0x94, 0xbc, 0x28, 0x6a, 0x06, 0xc8, 0xf1, 0x3e, 0xa5, 0xe4, 0x6e, 0xc2, 0x48, 0x8a, 0x38,
	0x8a, 0x33, 0x47, 0x9f, 0x33, 0xe6, 0xad, 0xe5, 0x95, 0xa1, 0xb8, 0x8a, 0x35, 0xaf, 0x25, 0xcb,
	0x9a, 0x89, 0xe0, 0x87, 0xed, 0x92, 0x31, 0xf3, 0x00, 0xac, 0x4a, 0xd8, 0xbe, 0x04, 0xc6, 0x5b,
	0x7c, 0x28, 0x6d, 0x9b, 0xed, 0xe2, 0xa3, 0x3d, 0x05, 0x17, 0xf6, 0x51, 0x94, 0x63, 0x47, 0x97,
	0x31, 0x75, 0x08, 0xf4, 0xfb, 0x5a, 0x63, 0x14, 0x46, 0x94, 0x2f, 0xf7, 0xb3, 0x06, 0xd3, 0x0d,
	0x24, 0xc2, 0x37, 0x2d, 0x8e, 0x7b, 0x34, 0x14, 0xe7, 0x35, 0x4f, 0xfd, 0x0c, 0xe6, 0x59, 0xb1,
	0xd1, 0x81, 0xc9, 0x75, 0x16, 0xe6, 0x31, 0x4e, 0x6a, 0x0e, 0x9a, 0x7f, 0xee, 0xa0, 0xf2, 0xfc,
	0xee, 0x57, 0x0d, 0xc6, 0x9e, 0xe7, 0xe2, 0x88, 0xbb, 0x03, 0x13, 0x05, 0xb7, 0x87, 0x33, 0x41,
	0x13, 0x79, 0x27, 0x4b, 0xf8, 0xe2, 0xef, 0xe0, 0xeb, 0x47, 0x25, 0x85, 0x21, 0x52, 0x8b, 0xd8,
	0x18, 0xa6, 0x06, 0x83, 0xaa, 0xc2, 0xd5, 0xb4, 0xee, 0x9e, 0x6a, 0x5a, 0xf5, 0x0e, 0x93, 0x7d,
	0x5e, 0x25, 0xdc, 0xb8, 0x08, 0x56, 0x85, 0xee, 0x7e, 0xd3, 0xc0, 0xa9, 0xde, 0x81, 0x7f, 0xd8,
	0xea, 0x7b, 0x03, 0xa6, 0xb7, 0x58, 0x0f, 0x47, 0xcd, 0x83, 0x94, 0xf1, 0xf3, 0x71, 0x2a, 0xb9,
	0xbc, 0xc6, 0x35, 0x4e, 0xc5, 0xe5, 0x3f, 0x71, 0xab, 0x11, 0xfb, 0x06, 0x8c, 0xc5, 0x85, 0x95,
	0xee, 0x1e, 0xe3, 0x31, 0x12, 0xce, 0xff, 0xf2, 0x5b, 0x6e, 0xc9, 0xd8, 0x13, 0x19, 0xb2, 0x5f,
	0x1d, 0x5b, 0x38, 0x8f, 0x87, 0x76, 0xfc, 0xc5, 0x60, 0xce, 0x7a, 0xf9, 0x1c, 0x7b, 0x25, 0x1f,
	0x34, 0xb8, 0xa9, 0x9a, 0x36, 0x0b, 0x0d, 0x12, 0xb8, 0xd7, 0x3c, 0x40, 0x71, 0x1a, 0xe1, 0xac,
	0xf6, 0x7a, 0xfe, 0xce, 0x85, 0x59, 0x00, 0x73, 0xb0, 0x12, 0xec, 0xab, 0x00, 0xb4, 0xd8, 0x2e,
	0xdd, 0x9c, 0xd3, 0xcc, 0xd1, 0xe6, 0x8c, 0x79, 0xb3, 0x6d, 0xca, 0xc8, 0x36, 0xa7, 0x99, 0x7b,
	0x07, 0xc6, 0xeb, 0x2b, 0xcb, 0x9e, 0x05, 0x73, 0x50, 0x50, 0x0e, 0x67, 0xb4, 0xaf, 0x77, 0x1f,
	0xc1, 0x78, 0xfd, 0xee, 0xd8, 0x0b, 0x70, 0x99, 0x49, 0xcb, 0x85, 0xbe, 0x9b, 0x72, 0xbc, 0x47,
	0x0f, 0xca, 0xb2, 0x09, 0x95, 0xd8, 0xe6, 0xb4, 0x25, 0xc3, 0xee, 0x0a, 0x4c, 0x9e, 0xe0, 0xaa,
	0x78, 0xc4, 0x23, 0x44, 0x59, 0x6b, 0x0e, 0x6a, 0x5d, 0xbf, 0xe8, 0xc9, 0x4f, 0x5f, 0xd0, 0xf8,
	0xa8, 0xc1, 0xf5, 0x90, 0xc5, 0xc3, 0xa6, 0xdb, 0xd2, 0x5e, 0xaf, 0x96, 0x69, 0xc2, 0x22, 0x94,
	0x10, 0x8f, 0x71, 0xe2, 0x13, 0x9c, 0xc8, 0x5f, 0x69, 0x5f, 0xa5, 0x50, 0x4a, 0xb3, 0x13, 0xff,
	0x09, 0x3c, 0x54, 0xc7, 0x2f, 0xfa, 0xec, 0x86, 0x14, 0x76, 0xd6, 0x0a, 0x51, 0x67, 0x35, 0x17,
	0x6c, 0x2b, 0xea, 0xec, 0x28, 0xd1, 0x77, 0xfd, 0x9a, 0xca, 0x06, 0x81, 0x4c, 0x07, 0x81, 0xcc,
	0x6f, 0x06, 0x41, 0x29, 0xd8, 0x1d, 0x91, 0xcd, 0xee, 0xfd, 0x08, 0x00, 0x00, 0xff, 0xff, 0xc0,
	0x26, 0xdb, 0xd5, 0x75, 0x08, 0x00, 0x00,
}
1820