1// Code generated by protoc-gen-go. DO NOT EDIT.
2// source: google/cloud/bigquery/datatransfer/v1/datasource.proto
3
4package datatransfer
5
6import (
7	context "context"
8	fmt "fmt"
9	math "math"
10
11	proto "github.com/golang/protobuf/proto"
12	duration "github.com/golang/protobuf/ptypes/duration"
13	empty "github.com/golang/protobuf/ptypes/empty"
14	_ "github.com/golang/protobuf/ptypes/timestamp"
15	wrappers "github.com/golang/protobuf/ptypes/wrappers"
16	_ "google.golang.org/genproto/googleapis/api/annotations"
17	field_mask "google.golang.org/genproto/protobuf/field_mask"
18	grpc "google.golang.org/grpc"
19	codes "google.golang.org/grpc/codes"
20	status "google.golang.org/grpc/status"
21)
22
// Reference imports to suppress errors if they are not otherwise used.
// (Some imported packages are referenced only from other chunks of this
// generated file or from struct tags.)
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
33
// WriteDisposition holds options for writing to the table.
// The WRITE_EMPTY option is intentionally excluded from the enum and is not
// supported by the data transfer service.
type WriteDisposition int32

const (
	// The default write disposition.
	WriteDisposition_WRITE_DISPOSITION_UNSPECIFIED WriteDisposition = 0
	// Overwrites the table data.
	WriteDisposition_WRITE_TRUNCATE WriteDisposition = 1
	// The data is appended to the table.
	// Note duplication might happen if this mode is used.
	WriteDisposition_WRITE_APPEND WriteDisposition = 2
)
48
// WriteDisposition_name maps each WriteDisposition number to its
// proto-declared name.
var WriteDisposition_name = map[int32]string{
	2: "WRITE_APPEND",
	1: "WRITE_TRUNCATE",
	0: "WRITE_DISPOSITION_UNSPECIFIED",
}

// WriteDisposition_value is the inverse of WriteDisposition_name.
var WriteDisposition_value = map[string]int32{
	"WRITE_APPEND":                  2,
	"WRITE_TRUNCATE":                1,
	"WRITE_DISPOSITION_UNSPECIFIED": 0,
}
60
// String returns the proto-declared name of x (e.g. "WRITE_TRUNCATE"); the
// protobuf runtime falls back to the decimal value for unknown numbers.
func (x WriteDisposition) String() string {
	return proto.EnumName(WriteDisposition_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path identifying this enum within the descriptor.
func (WriteDisposition) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0}
}
68
// ImportedDataInfo_Format enumerates the formats of the data to be imported.
type ImportedDataInfo_Format int32

const (
	// Unspecified format. In this case, we have to infer the format from the
	// data source.
	ImportedDataInfo_FORMAT_UNSPECIFIED ImportedDataInfo_Format = 0
	// CSV format.
	ImportedDataInfo_CSV ImportedDataInfo_Format = 1
	// Newline-delimited JSON.
	ImportedDataInfo_JSON ImportedDataInfo_Format = 2
	// Avro format. See http://avro.apache.org .
	ImportedDataInfo_AVRO ImportedDataInfo_Format = 3
	// RecordIO.
	ImportedDataInfo_RECORDIO ImportedDataInfo_Format = 4
	// ColumnIO.
	ImportedDataInfo_COLUMNIO ImportedDataInfo_Format = 5
	// Capacitor.
	ImportedDataInfo_CAPACITOR ImportedDataInfo_Format = 6
	// Parquet format. See https://parquet.apache.org .
	ImportedDataInfo_PARQUET ImportedDataInfo_Format = 7
	// ORC format. See https://orc.apache.org .
	ImportedDataInfo_ORC ImportedDataInfo_Format = 8
)
93
// ImportedDataInfo_Format_name maps each Format number to its proto name.
var ImportedDataInfo_Format_name = map[int32]string{
	8: "ORC",
	7: "PARQUET",
	6: "CAPACITOR",
	5: "COLUMNIO",
	4: "RECORDIO",
	3: "AVRO",
	2: "JSON",
	1: "CSV",
	0: "FORMAT_UNSPECIFIED",
}

// ImportedDataInfo_Format_value is the inverse of ImportedDataInfo_Format_name.
var ImportedDataInfo_Format_value = map[string]int32{
	"ORC":                8,
	"PARQUET":            7,
	"CAPACITOR":          6,
	"COLUMNIO":           5,
	"RECORDIO":           4,
	"AVRO":               3,
	"JSON":               2,
	"CSV":                1,
	"FORMAT_UNSPECIFIED": 0,
}
117
// String returns the proto-declared name of x (e.g. "CSV").
func (x ImportedDataInfo_Format) String() string {
	return proto.EnumName(ImportedDataInfo_Format_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path identifying this enum within the descriptor.
func (ImportedDataInfo_Format) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 0}
}
125
// ImportedDataInfo_Encoding is the character encoding of input data in
// CSV/JSON format.
type ImportedDataInfo_Encoding int32

const (
	// Default encoding (UTF8).
	ImportedDataInfo_ENCODING_UNSPECIFIED ImportedDataInfo_Encoding = 0
	// ISO_8859_1 encoding.
	ImportedDataInfo_ISO_8859_1 ImportedDataInfo_Encoding = 1
	// UTF8 encoding.
	ImportedDataInfo_UTF8 ImportedDataInfo_Encoding = 2
)
137
// ImportedDataInfo_Encoding_name maps each Encoding number to its proto name.
var ImportedDataInfo_Encoding_name = map[int32]string{
	2: "UTF8",
	1: "ISO_8859_1",
	0: "ENCODING_UNSPECIFIED",
}

// ImportedDataInfo_Encoding_value is the inverse of
// ImportedDataInfo_Encoding_name.
var ImportedDataInfo_Encoding_value = map[string]int32{
	"UTF8":                 2,
	"ISO_8859_1":           1,
	"ENCODING_UNSPECIFIED": 0,
}
149
// String returns the proto-declared name of x (e.g. "UTF8").
func (x ImportedDataInfo_Encoding) String() string {
	return proto.EnumName(ImportedDataInfo_Encoding_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path identifying this enum within the descriptor.
func (ImportedDataInfo_Encoding) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 1}
}
157
// LINT.IfChange
// ImportedDataInfo_FieldSchema_Type is the type of a field in the imported
// data's schema.
type ImportedDataInfo_FieldSchema_Type int32

const (
	// Illegal value.
	ImportedDataInfo_FieldSchema_TYPE_UNSPECIFIED ImportedDataInfo_FieldSchema_Type = 0
	// 64K, UTF8.
	ImportedDataInfo_FieldSchema_STRING ImportedDataInfo_FieldSchema_Type = 1
	// 64-bit signed.
	ImportedDataInfo_FieldSchema_INTEGER ImportedDataInfo_FieldSchema_Type = 2
	// 64-bit IEEE floating point.
	ImportedDataInfo_FieldSchema_FLOAT ImportedDataInfo_FieldSchema_Type = 3
	// Aggregate type.
	ImportedDataInfo_FieldSchema_RECORD ImportedDataInfo_FieldSchema_Type = 4
	// 64K, Binary.
	ImportedDataInfo_FieldSchema_BYTES ImportedDataInfo_FieldSchema_Type = 5
	// 2-valued.
	ImportedDataInfo_FieldSchema_BOOLEAN ImportedDataInfo_FieldSchema_Type = 6
	// 64-bit signed usec since UTC epoch.
	ImportedDataInfo_FieldSchema_TIMESTAMP ImportedDataInfo_FieldSchema_Type = 7
	// Civil date - Year, Month, Day.
	ImportedDataInfo_FieldSchema_DATE ImportedDataInfo_FieldSchema_Type = 8
	// Civil time - Hour, Minute, Second, Microseconds.
	ImportedDataInfo_FieldSchema_TIME ImportedDataInfo_FieldSchema_Type = 9
	// Combination of civil date and civil time.
	ImportedDataInfo_FieldSchema_DATETIME ImportedDataInfo_FieldSchema_Type = 10
	// Numeric type with 38 decimal digits of precision and 9 decimal digits
	// of scale.
	ImportedDataInfo_FieldSchema_NUMERIC ImportedDataInfo_FieldSchema_Type = 11
	// Geography object (go/googlesql_geography).
	ImportedDataInfo_FieldSchema_GEOGRAPHY ImportedDataInfo_FieldSchema_Type = 12
)
191
// ImportedDataInfo_FieldSchema_Type_name maps each Type number to its proto
// name.
var ImportedDataInfo_FieldSchema_Type_name = map[int32]string{
	12: "GEOGRAPHY",
	11: "NUMERIC",
	10: "DATETIME",
	9:  "TIME",
	8:  "DATE",
	7:  "TIMESTAMP",
	6:  "BOOLEAN",
	5:  "BYTES",
	4:  "RECORD",
	3:  "FLOAT",
	2:  "INTEGER",
	1:  "STRING",
	0:  "TYPE_UNSPECIFIED",
}

// ImportedDataInfo_FieldSchema_Type_value is the inverse of
// ImportedDataInfo_FieldSchema_Type_name.
var ImportedDataInfo_FieldSchema_Type_value = map[string]int32{
	"GEOGRAPHY":        12,
	"NUMERIC":          11,
	"DATETIME":         10,
	"TIME":             9,
	"DATE":             8,
	"TIMESTAMP":        7,
	"BOOLEAN":          6,
	"BYTES":            5,
	"RECORD":           4,
	"FLOAT":            3,
	"INTEGER":          2,
	"STRING":           1,
	"TYPE_UNSPECIFIED": 0,
}
223
// String returns the proto-declared name of x (e.g. "STRING").
func (x ImportedDataInfo_FieldSchema_Type) String() string {
	return proto.EnumName(ImportedDataInfo_FieldSchema_Type_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the index
// path identifying this enum within the descriptor.
func (ImportedDataInfo_FieldSchema_Type) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 0, 0}
}
231
// Describes data which should be imported.
type ImportedDataInfo struct {
	// SQL query to run. When empty, API checks that there is only one
	// table_def specified and loads this table. Only Standard SQL queries
	// are accepted. Legacy SQL is not allowed.
	Sql string `protobuf:"bytes,1,opt,name=sql,proto3" json:"sql,omitempty"`
	// Table where results should be written.
	DestinationTableId string `protobuf:"bytes,2,opt,name=destination_table_id,json=destinationTableId,proto3" json:"destination_table_id,omitempty"`
	// The description of a destination table. This can be several sentences
	// or paragraphs describing the table contents in detail.
	DestinationTableDescription string `protobuf:"bytes,10,opt,name=destination_table_description,json=destinationTableDescription,proto3" json:"destination_table_description,omitempty"`
	// When used WITHOUT the "sql" parameter, describes the schema of the
	// destination table.
	// When used WITH the "sql" parameter, describes tables with data stored
	// outside of BigQuery.
	TableDefs []*ImportedDataInfo_TableDefinition `protobuf:"bytes,3,rep,name=table_defs,json=tableDefs,proto3" json:"table_defs,omitempty"`
	// Inline code for User-defined function resources.
	// Ignored when "sql" parameter is empty.
	UserDefinedFunctions []string `protobuf:"bytes,4,rep,name=user_defined_functions,json=userDefinedFunctions,proto3" json:"user_defined_functions,omitempty"`
	// Specifies the action if the destination table already exists.
	WriteDisposition     WriteDisposition `protobuf:"varint,6,opt,name=write_disposition,json=writeDisposition,proto3,enum=google.cloud.bigquery.datatransfer.v1.WriteDisposition" json:"write_disposition,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}         `json:"-"`
	XXX_unrecognized     []byte           `json:"-"`
	XXX_sizecache        int32            `json:"-"`
}
257
// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *ImportedDataInfo) Reset()         { *m = ImportedDataInfo{} }
func (m *ImportedDataInfo) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo) ProtoMessage()    {}
func (*ImportedDataInfo) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *ImportedDataInfo) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ImportedDataInfo.Unmarshal(m, b)
}
func (m *ImportedDataInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ImportedDataInfo.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ImportedDataInfo.Merge(m, src)
}
func (m *ImportedDataInfo) XXX_Size() int {
	return xxx_messageInfo_ImportedDataInfo.Size(m)
}
func (m *ImportedDataInfo) XXX_DiscardUnknown() {
	xxx_messageInfo_ImportedDataInfo.DiscardUnknown(m)
}

// xxx_messageInfo_ImportedDataInfo caches marshal/unmarshal tables for this type.
var xxx_messageInfo_ImportedDataInfo proto.InternalMessageInfo
282
283func (m *ImportedDataInfo) GetSql() string {
284	if m != nil {
285		return m.Sql
286	}
287	return ""
288}
289
290func (m *ImportedDataInfo) GetDestinationTableId() string {
291	if m != nil {
292		return m.DestinationTableId
293	}
294	return ""
295}
296
297func (m *ImportedDataInfo) GetDestinationTableDescription() string {
298	if m != nil {
299		return m.DestinationTableDescription
300	}
301	return ""
302}
303
304func (m *ImportedDataInfo) GetTableDefs() []*ImportedDataInfo_TableDefinition {
305	if m != nil {
306		return m.TableDefs
307	}
308	return nil
309}
310
311func (m *ImportedDataInfo) GetUserDefinedFunctions() []string {
312	if m != nil {
313		return m.UserDefinedFunctions
314	}
315	return nil
316}
317
318func (m *ImportedDataInfo) GetWriteDisposition() WriteDisposition {
319	if m != nil {
320		return m.WriteDisposition
321	}
322	return WriteDisposition_WRITE_DISPOSITION_UNSPECIFIED
323}
324
// Defines schema of a field in the imported data.
type ImportedDataInfo_FieldSchema struct {
	// Field name. Matches: [A-Za-z_][A-Za-z_0-9]{0,127}
	FieldName string `protobuf:"bytes,1,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"`
	// Field type
	Type ImportedDataInfo_FieldSchema_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_FieldSchema_Type" json:"type,omitempty"`
	// Is field repeated.
	IsRepeated bool `protobuf:"varint,3,opt,name=is_repeated,json=isRepeated,proto3" json:"is_repeated,omitempty"`
	// Description for this field.
	Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
	// Present iff type == RECORD.
	Schema               *ImportedDataInfo_RecordSchema `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}                       `json:"-"`
	XXX_unrecognized     []byte                         `json:"-"`
	XXX_sizecache        int32                          `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *ImportedDataInfo_FieldSchema) Reset()         { *m = ImportedDataInfo_FieldSchema{} }
func (m *ImportedDataInfo_FieldSchema) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_FieldSchema) ProtoMessage()    {}
func (*ImportedDataInfo_FieldSchema) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 0}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *ImportedDataInfo_FieldSchema) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ImportedDataInfo_FieldSchema.Unmarshal(m, b)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ImportedDataInfo_FieldSchema.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ImportedDataInfo_FieldSchema.Merge(m, src)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Size() int {
	return xxx_messageInfo_ImportedDataInfo_FieldSchema.Size(m)
}
func (m *ImportedDataInfo_FieldSchema) XXX_DiscardUnknown() {
	xxx_messageInfo_ImportedDataInfo_FieldSchema.DiscardUnknown(m)
}

// xxx_messageInfo_ImportedDataInfo_FieldSchema caches marshal/unmarshal tables.
var xxx_messageInfo_ImportedDataInfo_FieldSchema proto.InternalMessageInfo
366
367func (m *ImportedDataInfo_FieldSchema) GetFieldName() string {
368	if m != nil {
369		return m.FieldName
370	}
371	return ""
372}
373
374func (m *ImportedDataInfo_FieldSchema) GetType() ImportedDataInfo_FieldSchema_Type {
375	if m != nil {
376		return m.Type
377	}
378	return ImportedDataInfo_FieldSchema_TYPE_UNSPECIFIED
379}
380
381func (m *ImportedDataInfo_FieldSchema) GetIsRepeated() bool {
382	if m != nil {
383		return m.IsRepeated
384	}
385	return false
386}
387
388func (m *ImportedDataInfo_FieldSchema) GetDescription() string {
389	if m != nil {
390		return m.Description
391	}
392	return ""
393}
394
395func (m *ImportedDataInfo_FieldSchema) GetSchema() *ImportedDataInfo_RecordSchema {
396	if m != nil {
397		return m.Schema
398	}
399	return nil
400}
401
// Describes schema of the data to be ingested.
type ImportedDataInfo_RecordSchema struct {
	// One field per column in the record.
	Fields               []*ImportedDataInfo_FieldSchema `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}                        `json:"-"`
	XXX_unrecognized     []byte                          `json:"-"`
	XXX_sizecache        int32                           `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *ImportedDataInfo_RecordSchema) Reset()         { *m = ImportedDataInfo_RecordSchema{} }
func (m *ImportedDataInfo_RecordSchema) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_RecordSchema) ProtoMessage()    {}
func (*ImportedDataInfo_RecordSchema) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 1}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *ImportedDataInfo_RecordSchema) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ImportedDataInfo_RecordSchema.Unmarshal(m, b)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ImportedDataInfo_RecordSchema.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ImportedDataInfo_RecordSchema.Merge(m, src)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Size() int {
	return xxx_messageInfo_ImportedDataInfo_RecordSchema.Size(m)
}
func (m *ImportedDataInfo_RecordSchema) XXX_DiscardUnknown() {
	xxx_messageInfo_ImportedDataInfo_RecordSchema.DiscardUnknown(m)
}

// xxx_messageInfo_ImportedDataInfo_RecordSchema caches marshal/unmarshal tables.
var xxx_messageInfo_ImportedDataInfo_RecordSchema proto.InternalMessageInfo
435
436func (m *ImportedDataInfo_RecordSchema) GetFields() []*ImportedDataInfo_FieldSchema {
437	if m != nil {
438		return m.Fields
439	}
440	return nil
441}
442
// External table definition. These tables can be referenced with 'name'
// in the query and can be read just like any other table.
type ImportedDataInfo_TableDefinition struct {
	// BigQuery table_id (required). This will be used to reference this
	// table in the query.
	TableId string `protobuf:"bytes,1,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"`
	// URIs for the data to be imported. All URIs must be from the same storage
	// system.
	SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"`
	// Describes the format of the data in source_uri.
	Format ImportedDataInfo_Format `protobuf:"varint,3,opt,name=format,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Format" json:"format,omitempty"`
	// Specify the maximum number of bad records that can be ignored.
	// If bad records exceed this threshold the query is aborted.
	MaxBadRecords int32 `protobuf:"varint,4,opt,name=max_bad_records,json=maxBadRecords,proto3" json:"max_bad_records,omitempty"`
	// Character encoding of the input when applicable (CSV, JSON).
	// Defaults to UTF8.
	Encoding ImportedDataInfo_Encoding `protobuf:"varint,5,opt,name=encoding,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Encoding" json:"encoding,omitempty"`
	// CSV specific options.
	CsvOptions *ImportedDataInfo_TableDefinition_CsvOptions `protobuf:"bytes,6,opt,name=csv_options,json=csvOptions,proto3" json:"csv_options,omitempty"`
	// Optional schema for the data. When not specified for JSON and CSV formats
	// we will try to detect it automatically.
	Schema *ImportedDataInfo_RecordSchema `protobuf:"bytes,7,opt,name=schema,proto3" json:"schema,omitempty"`
	// Indicates if extra values that are not represented in the table schema is
	// allowed.
	IgnoreUnknownValues  *wrappers.BoolValue `protobuf:"bytes,10,opt,name=ignore_unknown_values,json=ignoreUnknownValues,proto3" json:"ignore_unknown_values,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}            `json:"-"`
	XXX_unrecognized     []byte              `json:"-"`
	XXX_sizecache        int32               `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *ImportedDataInfo_TableDefinition) Reset()         { *m = ImportedDataInfo_TableDefinition{} }
func (m *ImportedDataInfo_TableDefinition) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_TableDefinition) ProtoMessage()    {}
func (*ImportedDataInfo_TableDefinition) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 2}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *ImportedDataInfo_TableDefinition) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition.Unmarshal(m, b)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ImportedDataInfo_TableDefinition.Merge(m, src)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Size() int {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition.Size(m)
}
func (m *ImportedDataInfo_TableDefinition) XXX_DiscardUnknown() {
	xxx_messageInfo_ImportedDataInfo_TableDefinition.DiscardUnknown(m)
}

// xxx_messageInfo_ImportedDataInfo_TableDefinition caches marshal/unmarshal tables.
var xxx_messageInfo_ImportedDataInfo_TableDefinition proto.InternalMessageInfo
497
498func (m *ImportedDataInfo_TableDefinition) GetTableId() string {
499	if m != nil {
500		return m.TableId
501	}
502	return ""
503}
504
505func (m *ImportedDataInfo_TableDefinition) GetSourceUris() []string {
506	if m != nil {
507		return m.SourceUris
508	}
509	return nil
510}
511
512func (m *ImportedDataInfo_TableDefinition) GetFormat() ImportedDataInfo_Format {
513	if m != nil {
514		return m.Format
515	}
516	return ImportedDataInfo_FORMAT_UNSPECIFIED
517}
518
519func (m *ImportedDataInfo_TableDefinition) GetMaxBadRecords() int32 {
520	if m != nil {
521		return m.MaxBadRecords
522	}
523	return 0
524}
525
526func (m *ImportedDataInfo_TableDefinition) GetEncoding() ImportedDataInfo_Encoding {
527	if m != nil {
528		return m.Encoding
529	}
530	return ImportedDataInfo_ENCODING_UNSPECIFIED
531}
532
533func (m *ImportedDataInfo_TableDefinition) GetCsvOptions() *ImportedDataInfo_TableDefinition_CsvOptions {
534	if m != nil {
535		return m.CsvOptions
536	}
537	return nil
538}
539
540func (m *ImportedDataInfo_TableDefinition) GetSchema() *ImportedDataInfo_RecordSchema {
541	if m != nil {
542		return m.Schema
543	}
544	return nil
545}
546
547func (m *ImportedDataInfo_TableDefinition) GetIgnoreUnknownValues() *wrappers.BoolValue {
548	if m != nil {
549		return m.IgnoreUnknownValues
550	}
551	return nil
552}
553
// CSV specific options.
type ImportedDataInfo_TableDefinition_CsvOptions struct {
	// The delimiter.  We currently restrict this to U+0001 to U+00FF and
	// apply additional constraints during validation.
	FieldDelimiter *wrappers.StringValue `protobuf:"bytes,1,opt,name=field_delimiter,json=fieldDelimiter,proto3" json:"field_delimiter,omitempty"`
	// Whether CSV files are allowed to have quoted newlines. If quoted
	// newlines are allowed, we can't split CSV files.
	AllowQuotedNewlines *wrappers.BoolValue `protobuf:"bytes,2,opt,name=allow_quoted_newlines,json=allowQuotedNewlines,proto3" json:"allow_quoted_newlines,omitempty"`
	// The quote character.  We currently restrict this to U+0000 to U+00FF
	// and apply additional constraints during validation. Set to '\0' to
	// indicate no quote is used.
	QuoteChar *wrappers.StringValue `protobuf:"bytes,3,opt,name=quote_char,json=quoteChar,proto3" json:"quote_char,omitempty"`
	// Number of leading rows to skip.
	SkipLeadingRows *wrappers.Int64Value `protobuf:"bytes,4,opt,name=skip_leading_rows,json=skipLeadingRows,proto3" json:"skip_leading_rows,omitempty"`
	// Accept rows that are missing trailing optional columns.
	AllowJaggedRows      *wrappers.BoolValue `protobuf:"bytes,5,opt,name=allow_jagged_rows,json=allowJaggedRows,proto3" json:"allow_jagged_rows,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}            `json:"-"`
	XXX_unrecognized     []byte              `json:"-"`
	XXX_sizecache        int32               `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *ImportedDataInfo_TableDefinition_CsvOptions) Reset() {
	*m = ImportedDataInfo_TableDefinition_CsvOptions{}
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) String() string {
	return proto.CompactTextString(m)
}
func (*ImportedDataInfo_TableDefinition_CsvOptions) ProtoMessage() {}
func (*ImportedDataInfo_TableDefinition_CsvOptions) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{0, 2, 0}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Unmarshal(m, b)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Merge(m, src)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Size() int {
	return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Size(m)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_DiscardUnknown() {
	xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.DiscardUnknown(m)
}

// xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions caches
// marshal/unmarshal tables.
var xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions proto.InternalMessageInfo
603
604func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetFieldDelimiter() *wrappers.StringValue {
605	if m != nil {
606		return m.FieldDelimiter
607	}
608	return nil
609}
610
611func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetAllowQuotedNewlines() *wrappers.BoolValue {
612	if m != nil {
613		return m.AllowQuotedNewlines
614	}
615	return nil
616}
617
618func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetQuoteChar() *wrappers.StringValue {
619	if m != nil {
620		return m.QuoteChar
621	}
622	return nil
623}
624
625func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetSkipLeadingRows() *wrappers.Int64Value {
626	if m != nil {
627		return m.SkipLeadingRows
628	}
629	return nil
630}
631
632func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetAllowJaggedRows() *wrappers.BoolValue {
633	if m != nil {
634		return m.AllowJaggedRows
635	}
636	return nil
637}
638
// A request to update a transfer run.
type UpdateTransferRunRequest struct {
	// Run name must be set and correspond to an already existing run. Only
	// state, error_status, and data_version fields will be updated. All other
	// fields will be ignored.
	TransferRun *TransferRun `protobuf:"bytes,1,opt,name=transfer_run,json=transferRun,proto3" json:"transfer_run,omitempty"`
	// Required list of fields to be updated in this request.
	UpdateMask           *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *UpdateTransferRunRequest) Reset()         { *m = UpdateTransferRunRequest{} }
func (m *UpdateTransferRunRequest) String() string { return proto.CompactTextString(m) }
func (*UpdateTransferRunRequest) ProtoMessage()    {}
func (*UpdateTransferRunRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{1}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *UpdateTransferRunRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_UpdateTransferRunRequest.Unmarshal(m, b)
}
func (m *UpdateTransferRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_UpdateTransferRunRequest.Marshal(b, m, deterministic)
}
func (m *UpdateTransferRunRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_UpdateTransferRunRequest.Merge(m, src)
}
func (m *UpdateTransferRunRequest) XXX_Size() int {
	return xxx_messageInfo_UpdateTransferRunRequest.Size(m)
}
func (m *UpdateTransferRunRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_UpdateTransferRunRequest.DiscardUnknown(m)
}

// xxx_messageInfo_UpdateTransferRunRequest caches marshal/unmarshal tables.
var xxx_messageInfo_UpdateTransferRunRequest proto.InternalMessageInfo
676
677func (m *UpdateTransferRunRequest) GetTransferRun() *TransferRun {
678	if m != nil {
679		return m.TransferRun
680	}
681	return nil
682}
683
684func (m *UpdateTransferRunRequest) GetUpdateMask() *field_mask.FieldMask {
685	if m != nil {
686		return m.UpdateMask
687	}
688	return nil
689}
690
// A request to add transfer status messages to the run.
type LogTransferRunMessagesRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Messages to append.
	TransferMessages     []*TransferMessage `protobuf:"bytes,2,rep,name=transfer_messages,json=transferMessages,proto3" json:"transfer_messages,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{}           `json:"-"`
	XXX_unrecognized     []byte             `json:"-"`
	XXX_sizecache        int32              `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *LogTransferRunMessagesRequest) Reset()         { *m = LogTransferRunMessagesRequest{} }
func (m *LogTransferRunMessagesRequest) String() string { return proto.CompactTextString(m) }
func (*LogTransferRunMessagesRequest) ProtoMessage()    {}
func (*LogTransferRunMessagesRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{2}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *LogTransferRunMessagesRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_LogTransferRunMessagesRequest.Unmarshal(m, b)
}
func (m *LogTransferRunMessagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_LogTransferRunMessagesRequest.Marshal(b, m, deterministic)
}
func (m *LogTransferRunMessagesRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_LogTransferRunMessagesRequest.Merge(m, src)
}
func (m *LogTransferRunMessagesRequest) XXX_Size() int {
	return xxx_messageInfo_LogTransferRunMessagesRequest.Size(m)
}
func (m *LogTransferRunMessagesRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_LogTransferRunMessagesRequest.DiscardUnknown(m)
}

// xxx_messageInfo_LogTransferRunMessagesRequest caches marshal/unmarshal tables.
var xxx_messageInfo_LogTransferRunMessagesRequest proto.InternalMessageInfo
727
728func (m *LogTransferRunMessagesRequest) GetName() string {
729	if m != nil {
730		return m.Name
731	}
732	return ""
733}
734
735func (m *LogTransferRunMessagesRequest) GetTransferMessages() []*TransferMessage {
736	if m != nil {
737		return m.TransferMessages
738	}
739	return nil
740}
741
// A request to start and monitor a BigQuery load job.
type StartBigQueryJobsRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Import jobs which should be started and monitored.
	ImportedData []*ImportedDataInfo `protobuf:"bytes,2,rep,name=imported_data,json=importedData,proto3" json:"imported_data,omitempty"`
	// User credentials which should be used to start/monitor
	// BigQuery jobs. If not specified, then jobs
	// are started using data source service account credentials.
	// This may be OAuth token or JWT token.
	UserCredentials []byte `protobuf:"bytes,3,opt,name=user_credentials,json=userCredentials,proto3" json:"user_credentials,omitempty"`
	// The number of BQ Jobs that can run in parallel.
	MaxParallelism       int32    `protobuf:"varint,8,opt,name=max_parallelism,json=maxParallelism,proto3" json:"max_parallelism,omitempty"`
	// Internal protobuf runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement proto.Message.
func (m *StartBigQueryJobsRequest) Reset()         { *m = StartBigQueryJobsRequest{} }
func (m *StartBigQueryJobsRequest) String() string { return proto.CompactTextString(m) }
func (*StartBigQueryJobsRequest) ProtoMessage()    {}
func (*StartBigQueryJobsRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{3}
}

// The XXX_* methods below delegate wire-format handling to the table-driven
// protobuf runtime; they are generated plumbing rather than public API.
func (m *StartBigQueryJobsRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_StartBigQueryJobsRequest.Unmarshal(m, b)
}
func (m *StartBigQueryJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_StartBigQueryJobsRequest.Marshal(b, m, deterministic)
}
func (m *StartBigQueryJobsRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_StartBigQueryJobsRequest.Merge(m, src)
}
func (m *StartBigQueryJobsRequest) XXX_Size() int {
	return xxx_messageInfo_StartBigQueryJobsRequest.Size(m)
}
func (m *StartBigQueryJobsRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_StartBigQueryJobsRequest.DiscardUnknown(m)
}

// xxx_messageInfo_StartBigQueryJobsRequest caches marshal/unmarshal tables.
var xxx_messageInfo_StartBigQueryJobsRequest proto.InternalMessageInfo
785
786func (m *StartBigQueryJobsRequest) GetName() string {
787	if m != nil {
788		return m.Name
789	}
790	return ""
791}
792
793func (m *StartBigQueryJobsRequest) GetImportedData() []*ImportedDataInfo {
794	if m != nil {
795		return m.ImportedData
796	}
797	return nil
798}
799
800func (m *StartBigQueryJobsRequest) GetUserCredentials() []byte {
801	if m != nil {
802		return m.UserCredentials
803	}
804	return nil
805}
806
807func (m *StartBigQueryJobsRequest) GetMaxParallelism() int32 {
808	if m != nil {
809		return m.MaxParallelism
810	}
811	return 0
812}
813
// A request to finish a run.
type FinishRunRequest struct {
	// Name of the resource in the form:
	// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for FinishRunRequest.
func (m *FinishRunRequest) Reset()         { *m = FinishRunRequest{} }
func (m *FinishRunRequest) String() string { return proto.CompactTextString(m) }
func (*FinishRunRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*FinishRunRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{4}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_FinishRunRequest.
func (m *FinishRunRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_FinishRunRequest.Unmarshal(m, b)
}
func (m *FinishRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_FinishRunRequest.Marshal(b, m, deterministic)
}
func (m *FinishRunRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_FinishRunRequest.Merge(m, src)
}
func (m *FinishRunRequest) XXX_Size() int {
	return xxx_messageInfo_FinishRunRequest.Size(m)
}
func (m *FinishRunRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_FinishRunRequest.DiscardUnknown(m)
}

// xxx_messageInfo_FinishRunRequest caches reflection-derived codec state
// for this message type.
var xxx_messageInfo_FinishRunRequest proto.InternalMessageInfo

// GetName returns the Name field; safe on a nil receiver (returns "").
func (m *FinishRunRequest) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
855
// Represents the request of the CreateDataSourceDefinition method.
type CreateDataSourceDefinitionRequest struct {
	// The BigQuery project id for which data source definition is associated.
	// Must be in the form: `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Data source definition.
	DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,2,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for CreateDataSourceDefinitionRequest.
func (m *CreateDataSourceDefinitionRequest) Reset()         { *m = CreateDataSourceDefinitionRequest{} }
func (m *CreateDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*CreateDataSourceDefinitionRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*CreateDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{5}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_CreateDataSourceDefinitionRequest.
func (m *CreateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_CreateDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_CreateDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_CreateDataSourceDefinitionRequest.Merge(m, src)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Size() int {
	return xxx_messageInfo_CreateDataSourceDefinitionRequest.Size(m)
}
func (m *CreateDataSourceDefinitionRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_CreateDataSourceDefinitionRequest.DiscardUnknown(m)
}

// xxx_messageInfo_CreateDataSourceDefinitionRequest caches
// reflection-derived codec state for this message type.
var xxx_messageInfo_CreateDataSourceDefinitionRequest proto.InternalMessageInfo

// GetParent returns the Parent field; safe on a nil receiver (returns "").
func (m *CreateDataSourceDefinitionRequest) GetParent() string {
	if m != nil {
		return m.Parent
	}
	return ""
}

// GetDataSourceDefinition returns the DataSourceDefinition field, or nil
// on a nil receiver.
func (m *CreateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition {
	if m != nil {
		return m.DataSourceDefinition
	}
	return nil
}
906
// Represents the request of the UpdateDataSourceDefinition method.
type UpdateDataSourceDefinitionRequest struct {
	// Data source definition.
	DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,1,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
	// Update field mask.
	UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
	XXX_unrecognized     []byte                `json:"-"`
	XXX_sizecache        int32                 `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for UpdateDataSourceDefinitionRequest.
func (m *UpdateDataSourceDefinitionRequest) Reset()         { *m = UpdateDataSourceDefinitionRequest{} }
func (m *UpdateDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*UpdateDataSourceDefinitionRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*UpdateDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{6}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_UpdateDataSourceDefinitionRequest.
func (m *UpdateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_UpdateDataSourceDefinitionRequest.Merge(m, src)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Size() int {
	return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Size(m)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_UpdateDataSourceDefinitionRequest.DiscardUnknown(m)
}

// xxx_messageInfo_UpdateDataSourceDefinitionRequest caches
// reflection-derived codec state for this message type.
var xxx_messageInfo_UpdateDataSourceDefinitionRequest proto.InternalMessageInfo

// GetDataSourceDefinition returns the DataSourceDefinition field, or nil
// on a nil receiver.
func (m *UpdateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition {
	if m != nil {
		return m.DataSourceDefinition
	}
	return nil
}

// GetUpdateMask returns the UpdateMask field, or nil on a nil receiver.
func (m *UpdateDataSourceDefinitionRequest) GetUpdateMask() *field_mask.FieldMask {
	if m != nil {
		return m.UpdateMask
	}
	return nil
}
956
// Represents the request of the DeleteDataSourceDefinition method. All transfer
// configs associated with the data source must be deleted first, before the
// data source can be deleted.
type DeleteDataSourceDefinitionRequest struct {
	// The field will contain name of the resource requested, for example:
	// `projects/{project_id}/locations/{location_id}/dataSourceDefinitions/{data_source_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for DeleteDataSourceDefinitionRequest.
func (m *DeleteDataSourceDefinitionRequest) Reset()         { *m = DeleteDataSourceDefinitionRequest{} }
func (m *DeleteDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*DeleteDataSourceDefinitionRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*DeleteDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{7}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_DeleteDataSourceDefinitionRequest.
func (m *DeleteDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DeleteDataSourceDefinitionRequest.Merge(m, src)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Size() int {
	return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Size(m)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_DeleteDataSourceDefinitionRequest.DiscardUnknown(m)
}

// xxx_messageInfo_DeleteDataSourceDefinitionRequest caches
// reflection-derived codec state for this message type.
var xxx_messageInfo_DeleteDataSourceDefinitionRequest proto.InternalMessageInfo

// GetName returns the Name field; safe on a nil receiver (returns "").
func (m *DeleteDataSourceDefinitionRequest) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
1000
// Represents the request of the GetDataSourceDefinition method.
type GetDataSourceDefinitionRequest struct {
	// The field will contain name of the resource requested.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for GetDataSourceDefinitionRequest.
func (m *GetDataSourceDefinitionRequest) Reset()         { *m = GetDataSourceDefinitionRequest{} }
func (m *GetDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*GetDataSourceDefinitionRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*GetDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{8}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_GetDataSourceDefinitionRequest.
func (m *GetDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *GetDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *GetDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetDataSourceDefinitionRequest.Merge(m, src)
}
func (m *GetDataSourceDefinitionRequest) XXX_Size() int {
	return xxx_messageInfo_GetDataSourceDefinitionRequest.Size(m)
}
func (m *GetDataSourceDefinitionRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetDataSourceDefinitionRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetDataSourceDefinitionRequest caches reflection-derived
// codec state for this message type.
var xxx_messageInfo_GetDataSourceDefinitionRequest proto.InternalMessageInfo

// GetName returns the Name field; safe on a nil receiver (returns "").
func (m *GetDataSourceDefinitionRequest) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
1041
// Represents the request of the ListDataSourceDefinitions method.
type ListDataSourceDefinitionsRequest struct {
	// The BigQuery project id for which data sources should be returned.
	// Must be in the form: `projects/{project_id}/locations/{location_id}`
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Pagination token, which can be used to request a specific page
	// of `ListDataSourceDefinitionsRequest` list results. For multiple-page
	// results, `ListDataSourceDefinitionsResponse` outputs a `next_page` token,
	// which can be used as the `page_token` value to request the next page of
	// the list results.
	PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Page size. The default page size is the maximum value of 1000 results.
	PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for ListDataSourceDefinitionsRequest.
func (m *ListDataSourceDefinitionsRequest) Reset()         { *m = ListDataSourceDefinitionsRequest{} }
func (m *ListDataSourceDefinitionsRequest) String() string { return proto.CompactTextString(m) }
func (*ListDataSourceDefinitionsRequest) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*ListDataSourceDefinitionsRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{9}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_ListDataSourceDefinitionsRequest.
func (m *ListDataSourceDefinitionsRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListDataSourceDefinitionsRequest.Unmarshal(m, b)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListDataSourceDefinitionsRequest.Marshal(b, m, deterministic)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListDataSourceDefinitionsRequest.Merge(m, src)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Size() int {
	return xxx_messageInfo_ListDataSourceDefinitionsRequest.Size(m)
}
func (m *ListDataSourceDefinitionsRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListDataSourceDefinitionsRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListDataSourceDefinitionsRequest caches
// reflection-derived codec state for this message type.
var xxx_messageInfo_ListDataSourceDefinitionsRequest proto.InternalMessageInfo

// GetParent returns the Parent field; safe on a nil receiver (returns "").
func (m *ListDataSourceDefinitionsRequest) GetParent() string {
	if m != nil {
		return m.Parent
	}
	return ""
}

// GetPageToken returns the PageToken field; safe on a nil receiver
// (returns "").
func (m *ListDataSourceDefinitionsRequest) GetPageToken() string {
	if m != nil {
		return m.PageToken
	}
	return ""
}

// GetPageSize returns the PageSize field, or 0 on a nil receiver.
func (m *ListDataSourceDefinitionsRequest) GetPageSize() int32 {
	if m != nil {
		return m.PageSize
	}
	return 0
}
1105
// Returns a list of supported data source definitions.
type ListDataSourceDefinitionsResponse struct {
	// List of supported data source definitions.
	DataSourceDefinitions []*DataSourceDefinition `protobuf:"bytes,1,rep,name=data_source_definitions,json=dataSourceDefinitions,proto3" json:"data_source_definitions,omitempty"`
	// Output only. The next-pagination token. For multiple-page list results,
	// this token can be used as the
	// `ListDataSourceDefinitionsRequest.page_token`
	// to request the next page of the list results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for ListDataSourceDefinitionsResponse.
func (m *ListDataSourceDefinitionsResponse) Reset()         { *m = ListDataSourceDefinitionsResponse{} }
func (m *ListDataSourceDefinitionsResponse) String() string { return proto.CompactTextString(m) }
func (*ListDataSourceDefinitionsResponse) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*ListDataSourceDefinitionsResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{10}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_ListDataSourceDefinitionsResponse.
func (m *ListDataSourceDefinitionsResponse) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListDataSourceDefinitionsResponse.Unmarshal(m, b)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListDataSourceDefinitionsResponse.Marshal(b, m, deterministic)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListDataSourceDefinitionsResponse.Merge(m, src)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Size() int {
	return xxx_messageInfo_ListDataSourceDefinitionsResponse.Size(m)
}
func (m *ListDataSourceDefinitionsResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_ListDataSourceDefinitionsResponse.DiscardUnknown(m)
}

// xxx_messageInfo_ListDataSourceDefinitionsResponse caches
// reflection-derived codec state for this message type.
var xxx_messageInfo_ListDataSourceDefinitionsResponse proto.InternalMessageInfo

// GetDataSourceDefinitions returns the DataSourceDefinitions field, or nil
// on a nil receiver.
func (m *ListDataSourceDefinitionsResponse) GetDataSourceDefinitions() []*DataSourceDefinition {
	if m != nil {
		return m.DataSourceDefinitions
	}
	return nil
}

// GetNextPageToken returns the NextPageToken field; safe on a nil receiver
// (returns "").
func (m *ListDataSourceDefinitionsResponse) GetNextPageToken() string {
	if m != nil {
		return m.NextPageToken
	}
	return ""
}
1158
// Represents the data source definition.
type DataSourceDefinition struct {
	// The resource name of the data source definition.
	// Data source definition names have the form
	// `projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}`.
	Name string `protobuf:"bytes,21,opt,name=name,proto3" json:"name,omitempty"`
	// Data source metadata.
	DataSource *DataSource `protobuf:"bytes,1,opt,name=data_source,json=dataSource,proto3" json:"data_source,omitempty"`
	// The Pub/Sub topic to be used for broadcasting a message when a transfer run
	// is created. Both this topic and transfer_config_pubsub_topic can be
	// set to a custom topic. By default, both topics are auto-generated if none
	// of them is provided when creating the definition. However, if one topic is
	// manually set, the other topic has to be manually set as well. The only
	// difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub
	// topic, but transfer_config_pubsub_topic can be set to empty. The comments
	// about "{location}" for transfer_config_pubsub_topic apply here too.
	TransferRunPubsubTopic string `protobuf:"bytes,13,opt,name=transfer_run_pubsub_topic,json=transferRunPubsubTopic,proto3" json:"transfer_run_pubsub_topic,omitempty"`
	// Duration which should be added to schedule_time to calculate
	// run_time when job is scheduled. Only applicable for automatically
	// scheduled transfer runs. Used to start a run early on a data source that
	// supports continuous data refresh to compensate for unknown timezone
	// offsets. Use a negative number to start a run late for data sources not
	// supporting continuous data refresh.
	RunTimeOffset *duration.Duration `protobuf:"bytes,16,opt,name=run_time_offset,json=runTimeOffset,proto3" json:"run_time_offset,omitempty"`
	// Support e-mail address of the OAuth client's Brand, which contains the
	// consent screen data.
	SupportEmail string `protobuf:"bytes,22,opt,name=support_email,json=supportEmail,proto3" json:"support_email,omitempty"`
	// When service account is specified, BigQuery will share created dataset
	// with the given service account. Also, this service account will be
	// eligible to perform status updates and message logging for data transfer
	// runs for the corresponding data_source_id.
	ServiceAccount string `protobuf:"bytes,2,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"`
	// Is data source disabled? If true, data_source is not visible.
	// API will also stop returning any data transfer configs and/or runs
	// associated with the data source. This setting has higher priority
	// than whitelisted_project_ids.
	Disabled bool `protobuf:"varint,5,opt,name=disabled,proto3" json:"disabled,omitempty"`
	// The Pub/Sub topic to use for broadcasting a message for transfer config. If
	// empty, a message will not be broadcasted. Both this topic and
	// transfer_run_pubsub_topic are auto-generated if none of them is provided
	// when creating the definition. It is recommended to provide
	// transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is
	// provided. Otherwise, it will be set to empty. If "{location}" is found in
	// the value, then that means, data source wants to handle message separately
	// for datasets in different regions. We will replace {location} with the
	// actual dataset location, as the actual topic name. For example,
	// projects/connector/topics/scheduler-{location} could become
	// projects/connector/topics/scheduler-us. If "{location}" is not found, then
	// we will use the input value as topic name.
	TransferConfigPubsubTopic string `protobuf:"bytes,12,opt,name=transfer_config_pubsub_topic,json=transferConfigPubsubTopic,proto3" json:"transfer_config_pubsub_topic,omitempty"`
	// Supported location_ids used for deciding in which locations Pub/Sub topics
	// need to be created. If custom Pub/Sub topics are used and they contains
	// '{location}', the location_ids will be used for validating the topics by
	// replacing the '{location}' with the individual location in the list. The
	// valid values are the "location_id" field of the response of `GET
	// https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations`
	// In addition, if the data source needs to support all available regions,
	// supported_location_ids can be set to "global" (a single string element).
	// When "global" is specified:
	// 1) the data source implementation is supposed to stage the data in proper
	// region of the destination dataset;
	// 2) Data source developer should be aware of the implications (e.g., network
	// traffic latency, potential charge associated with cross-region traffic,
	// etc.) of supporting the "global" region;
	SupportedLocationIds []string `protobuf:"bytes,23,rep,name=supported_location_ids,json=supportedLocationIds,proto3" json:"supported_location_ids,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset, String, ProtoMessage and Descriptor implement the legacy
// proto.Message interface for DataSourceDefinition.
func (m *DataSourceDefinition) Reset()         { *m = DataSourceDefinition{} }
func (m *DataSourceDefinition) String() string { return proto.CompactTextString(m) }
func (*DataSourceDefinition) ProtoMessage()    {}

// Descriptor returns the gzipped FileDescriptorProto bytes for this file
// and the index path of this message within it.
func (*DataSourceDefinition) Descriptor() ([]byte, []int) {
	return fileDescriptor_63170854e2f004ff, []int{11}
}

// The XXX_* methods are internal proto runtime plumbing delegating to the
// table-driven codec behind xxx_messageInfo_DataSourceDefinition.
func (m *DataSourceDefinition) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DataSourceDefinition.Unmarshal(m, b)
}
func (m *DataSourceDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DataSourceDefinition.Marshal(b, m, deterministic)
}
func (m *DataSourceDefinition) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DataSourceDefinition.Merge(m, src)
}
func (m *DataSourceDefinition) XXX_Size() int {
	return xxx_messageInfo_DataSourceDefinition.Size(m)
}
func (m *DataSourceDefinition) XXX_DiscardUnknown() {
	xxx_messageInfo_DataSourceDefinition.DiscardUnknown(m)
}

// xxx_messageInfo_DataSourceDefinition caches reflection-derived codec
// state for this message type.
var xxx_messageInfo_DataSourceDefinition proto.InternalMessageInfo

// GetName returns the Name field; safe on a nil receiver (returns "").
func (m *DataSourceDefinition) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

// GetDataSource returns the DataSource field, or nil on a nil receiver.
func (m *DataSourceDefinition) GetDataSource() *DataSource {
	if m != nil {
		return m.DataSource
	}
	return nil
}

// GetTransferRunPubsubTopic returns the TransferRunPubsubTopic field; safe
// on a nil receiver (returns "").
func (m *DataSourceDefinition) GetTransferRunPubsubTopic() string {
	if m != nil {
		return m.TransferRunPubsubTopic
	}
	return ""
}

// GetRunTimeOffset returns the RunTimeOffset field, or nil on a nil
// receiver.
func (m *DataSourceDefinition) GetRunTimeOffset() *duration.Duration {
	if m != nil {
		return m.RunTimeOffset
	}
	return nil
}

// GetSupportEmail returns the SupportEmail field; safe on a nil receiver
// (returns "").
func (m *DataSourceDefinition) GetSupportEmail() string {
	if m != nil {
		return m.SupportEmail
	}
	return ""
}

// GetServiceAccount returns the ServiceAccount field; safe on a nil
// receiver (returns "").
func (m *DataSourceDefinition) GetServiceAccount() string {
	if m != nil {
		return m.ServiceAccount
	}
	return ""
}

// GetDisabled returns the Disabled field, or false on a nil receiver.
func (m *DataSourceDefinition) GetDisabled() bool {
	if m != nil {
		return m.Disabled
	}
	return false
}

// GetTransferConfigPubsubTopic returns the TransferConfigPubsubTopic
// field; safe on a nil receiver (returns "").
func (m *DataSourceDefinition) GetTransferConfigPubsubTopic() string {
	if m != nil {
		return m.TransferConfigPubsubTopic
	}
	return ""
}

// GetSupportedLocationIds returns the SupportedLocationIds field, or nil
// on a nil receiver.
func (m *DataSourceDefinition) GetSupportedLocationIds() []string {
	if m != nil {
		return m.SupportedLocationIds
	}
	return nil
}
1316
// init registers the enums and message types generated from
// google/cloud/bigquery/datatransfer/v1/datasource.proto with the proto
// type registry under their fully-qualified proto names.
func init() {
	proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.WriteDisposition", WriteDisposition_name, WriteDisposition_value)
	proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Format", ImportedDataInfo_Format_name, ImportedDataInfo_Format_value)
	proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Encoding", ImportedDataInfo_Encoding_name, ImportedDataInfo_Encoding_value)
	proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_FieldSchema_Type", ImportedDataInfo_FieldSchema_Type_name, ImportedDataInfo_FieldSchema_Type_value)
	proto.RegisterType((*ImportedDataInfo)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo")
	proto.RegisterType((*ImportedDataInfo_FieldSchema)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema")
	proto.RegisterType((*ImportedDataInfo_RecordSchema)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema")
	proto.RegisterType((*ImportedDataInfo_TableDefinition)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition")
	proto.RegisterType((*ImportedDataInfo_TableDefinition_CsvOptions)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions")
	proto.RegisterType((*UpdateTransferRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest")
	proto.RegisterType((*LogTransferRunMessagesRequest)(nil), "google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest")
	proto.RegisterType((*StartBigQueryJobsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest")
	proto.RegisterType((*FinishRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.FinishRunRequest")
	proto.RegisterType((*CreateDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest")
	proto.RegisterType((*UpdateDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest")
	proto.RegisterType((*DeleteDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest")
	proto.RegisterType((*GetDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest")
	proto.RegisterType((*ListDataSourceDefinitionsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest")
	proto.RegisterType((*ListDataSourceDefinitionsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse")
	proto.RegisterType((*DataSourceDefinition)(nil), "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition")
}
1339
// init registers the raw gzipped file descriptor under the source .proto
// path so Descriptor() lookups can resolve it at runtime.
func init() {
	proto.RegisterFile("google/cloud/bigquery/datatransfer/v1/datasource.proto", fileDescriptor_63170854e2f004ff)
}
1343
1344var fileDescriptor_63170854e2f004ff = []byte{
1345	// 2189 bytes of a gzipped FileDescriptorProto
1346	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x59, 0xdd, 0x6e, 0xdb, 0xc8,
1347	0x15, 0x2e, 0xfd, 0x23, 0xcb, 0x47, 0xfe, 0xa1, 0xa7, 0x5e, 0x47, 0x51, 0x92, 0x8d, 0xa3, 0xc5,
1348	0xa6, 0xde, 0x5c, 0x48, 0xb5, 0x9a, 0xe6, 0x17, 0xdd, 0xad, 0x2c, 0xd1, 0x0e, 0x53, 0x5b, 0x92,
1349	0x29, 0xda, 0x41, 0xba, 0x2e, 0x88, 0x31, 0x39, 0x52, 0x26, 0xa6, 0x48, 0x86, 0x43, 0xda, 0xc9,
1350	0x16, 0xdb, 0x8b, 0xbe, 0x42, 0x51, 0xa0, 0x37, 0x7d, 0x81, 0x02, 0x2d, 0xd0, 0x37, 0x68, 0x0b,
1351	0xf4, 0xa2, 0x7b, 0xbb, 0xd7, 0x45, 0xb7, 0x40, 0x81, 0x5e, 0x6f, 0x1f, 0xa0, 0x28, 0x66, 0x48,
1352	0xc9, 0x8c, 0x2c, 0x59, 0x8a, 0x9d, 0xde, 0x91, 0x67, 0xe6, 0x9c, 0xf9, 0xbe, 0x33, 0xdf, 0x9c,
1353	0x39, 0x94, 0xe0, 0x5e, 0xdb, 0x75, 0xdb, 0x36, 0x29, 0x9a, 0xb6, 0x1b, 0x5a, 0xc5, 0x43, 0xda,
1354	0x7e, 0x15, 0x12, 0xff, 0x4d, 0xd1, 0xc2, 0x01, 0x0e, 0x7c, 0xec, 0xb0, 0x16, 0xf1, 0x8b, 0xc7,
1355	0xeb, 0xe2, 0x9d, 0xb9, 0xa1, 0x6f, 0x92, 0x82, 0xe7, 0xbb, 0x81, 0x8b, 0x3e, 0x8e, 0xfc, 0x0a,
1356	0xc2, 0xaf, 0xd0, 0xf5, 0x2b, 0x24, 0xfd, 0x0a, 0xc7, 0xeb, 0xb9, 0xeb, 0x71, 0x78, 0xec, 0xd1,
1357	0x22, 0x76, 0x1c, 0x37, 0xc0, 0x01, 0x75, 0x1d, 0x16, 0x05, 0xc9, 0x3d, 0x18, 0x7f, 0xf1, 0x5e,
1358	0xd0, 0xc8, 0xf3, 0xee, 0x78, 0x9e, 0x7d, 0x5e, 0x1f, 0xc6, 0x5e, 0xe2, 0xed, 0x30, 0x6c, 0x15,
1359	0xad, 0xd0, 0x17, 0x80, 0xe2, 0xf1, 0x6b, 0xfd, 0xe3, 0xa4, 0xe3, 0x05, 0x6f, 0xe2, 0xc1, 0xd5,
1360	0xfe, 0xc1, 0x16, 0x25, 0xb6, 0x65, 0x74, 0x30, 0x3b, 0x8a, 0x67, 0xdc, 0xec, 0x9f, 0x11, 0xd0,
1361	0x0e, 0x61, 0x01, 0xee, 0x78, 0xc3, 0xd6, 0x3f, 0xf1, 0xb1, 0xe7, 0x11, 0xbf, 0x9b, 0x8f, 0x2b,
1362	0x89, 0x6c, 0x99, 0x36, 0x25, 0x4e, 0x10, 0x0d, 0xe4, 0xff, 0x23, 0x83, 0xac, 0x76, 0x3c, 0xd7,
1363	0x0f, 0x88, 0x55, 0xc5, 0x01, 0x56, 0x9d, 0x96, 0x8b, 0x64, 0x98, 0x64, 0xaf, 0xec, 0xac, 0xb4,
1364	0x2a, 0xad, 0xcd, 0x6a, 0xfc, 0x11, 0x7d, 0x1f, 0x96, 0x2d, 0xc2, 0x02, 0xea, 0x08, 0x52, 0x46,
1365	0x80, 0x0f, 0x6d, 0x62, 0x50, 0x2b, 0x3b, 0x21, 0xa6, 0xa0, 0xc4, 0x98, 0xce, 0x87, 0x54, 0x0b,
1366	0x6d, 0xc0, 0x8d, 0xb3, 0x1e, 0x16, 0x61, 0xa6, 0x4f, 0x3d, 0x6e, 0xc9, 0x82, 0x70, 0xbd, 0xd6,
1367	0xef, 0x5a, 0x3d, 0x9d, 0x82, 0x5a, 0x00, 0x5d, 0xbf, 0x16, 0xcb, 0x4e, 0xae, 0x4e, 0xae, 0x65,
1368	0x4a, 0x5b, 0x85, 0xb1, 0xf4, 0x51, 0xe8, 0x27, 0x55, 0x88, 0xa3, 0xb7, 0xa8, 0x43, 0x79, 0x70,
1369	0x6d, 0x36, 0x88, 0x0d, 0x0c, 0xdd, 0x85, 0x95, 0x90, 0x11, 0x9f, 0x2f, 0x43, 0x1d, 0x62, 0x19,
1370	0xad, 0xd0, 0x31, 0x85, 0x9a, 0xb2, 0x53, 0xab, 0x93, 0x6b, 0xb3, 0xda, 0x32, 0x1f, 0xad, 0x46,
1371	0x83, 0x9b, 0xdd, 0x31, 0x64, 0xc1, 0xd2, 0x89, 0x4f, 0x03, 0x62, 0x58, 0x94, 0x79, 0x2e, 0x13,
1372	0x51, 0xb3, 0xa9, 0x55, 0x69, 0x6d, 0xa1, 0x74, 0x7f, 0x4c, 0x90, 0xcf, 0xb8, 0x7f, 0xf5, 0xd4,
1373	0x5d, 0x93, 0x4f, 0xfa, 0x2c, 0xb9, 0x7f, 0x4c, 0x42, 0x66, 0x93, 0xeb, 0xa1, 0x69, 0xbe, 0x20,
1374	0x1d, 0x8c, 0x6e, 0x00, 0x44, 0xf2, 0x70, 0x70, 0x87, 0xc4, 0x5b, 0x34, 0x2b, 0x2c, 0x35, 0xdc,
1375	0x21, 0xe8, 0x00, 0xa6, 0x82, 0x37, 0x1e, 0x11, 0x1b, 0xb3, 0x50, 0x7a, 0x72, 0xd1, 0x64, 0x25,
1376	0x56, 0x2c, 0xe8, 0x6f, 0x3c, 0xa2, 0x89, 0xa8, 0xe8, 0x26, 0x64, 0x28, 0x33, 0x7c, 0xe2, 0x11,
1377	0x1c, 0x10, 0x2b, 0x3b, 0xb9, 0x2a, 0xad, 0xa5, 0x35, 0xa0, 0x4c, 0x8b, 0x2d, 0x68, 0x15, 0x32,
1378	0xc9, 0x3d, 0x9e, 0x12, 0xf0, 0x92, 0x26, 0x74, 0x00, 0x29, 0x26, 0xe2, 0x66, 0xa7, 0x57, 0xa5,
1379	0xb5, 0x4c, 0xa9, 0x7a, 0x51, 0x88, 0x1a, 0x31, 0x5d, 0x3f, 0xc6, 0xa8, 0xc5, 0x31, 0xf3, 0x7f,
1380	0x94, 0x60, 0x8a, 0xe3, 0x45, 0xcb, 0x20, 0xeb, 0xcf, 0x1b, 0x8a, 0xb1, 0x57, 0x6b, 0x36, 0x94,
1381	0x8a, 0xba, 0xa9, 0x2a, 0x55, 0xf9, 0x3b, 0x08, 0x20, 0xd5, 0xd4, 0x35, 0xb5, 0xb6, 0x25, 0x4b,
1382	0x28, 0x03, 0x33, 0x6a, 0x4d, 0x57, 0xb6, 0x14, 0x4d, 0x9e, 0x40, 0xb3, 0x30, 0xbd, 0xb9, 0x5d,
1383	0x2f, 0xeb, 0xf2, 0x24, 0x9f, 0xa3, 0x29, 0x95, 0xba, 0x56, 0x95, 0xa7, 0xb8, 0x79, 0xe3, 0xb9,
1384	0xae, 0x34, 0xe5, 0x69, 0x3e, 0x7d, 0xa3, 0x5e, 0xdf, 0x56, 0xca, 0x35, 0x39, 0x85, 0xe6, 0x61,
1385	0x56, 0x57, 0x77, 0x94, 0xa6, 0x5e, 0xde, 0x69, 0xc8, 0x33, 0x28, 0x0d, 0x53, 0xd5, 0xb2, 0xae,
1386	0xc8, 0x69, 0xfe, 0xc4, 0x07, 0xe4, 0x59, 0x34, 0x07, 0x69, 0x6e, 0x13, 0x6f, 0xc0, 0xbd, 0x6b,
1387	0x7b, 0x3b, 0x8a, 0xa6, 0x56, 0xe4, 0x0c, 0xf7, 0xde, 0x52, 0xea, 0x5b, 0x5a, 0xb9, 0xf1, 0xe4,
1388	0xb9, 0x3c, 0x97, 0x3b, 0x82, 0xb9, 0x24, 0x17, 0xf4, 0x39, 0xa4, 0xc4, 0x7e, 0xb2, 0xac, 0x24,
1389	0x14, 0x5f, 0x79, 0x0f, 0x9b, 0xa8, 0xc5, 0x21, 0x73, 0x7f, 0x99, 0x81, 0xc5, 0xbe, 0x93, 0x80,
1390	0xae, 0x42, 0xba, 0x77, 0xa0, 0x23, 0x41, 0xcd, 0x04, 0xf1, 0x29, 0xbe, 0x09, 0x99, 0xa8, 0x38,
1391	0x1b, 0xa1, 0x4f, 0x59, 0x76, 0x42, 0x1c, 0x07, 0x88, 0x4c, 0x7b, 0x3e, 0x65, 0x68, 0x1f, 0x52,
1392	0x2d, 0xd7, 0xef, 0xe0, 0x40, 0x88, 0x61, 0xa1, 0xf4, 0xe9, 0x85, 0xc1, 0x8a, 0x28, 0x5a, 0x1c,
1393	0x0d, 0xdd, 0x86, 0xc5, 0x0e, 0x7e, 0x6d, 0x1c, 0x62, 0xcb, 0xf0, 0x45, 0x72, 0x98, 0x10, 0xd3,
1394	0xb4, 0x36, 0xdf, 0xc1, 0xaf, 0x37, 0xb0, 0x15, 0x65, 0x8c, 0xa1, 0x03, 0x48, 0x13, 0xc7, 0x74,
1395	0x2d, 0xea, 0xb4, 0x85, 0xa0, 0x16, 0x4a, 0x3f, 0xbe, 0x28, 0x02, 0x25, 0x8e, 0xa3, 0xf5, 0x22,
1396	0x22, 0x06, 0x19, 0x93, 0x1d, 0x1b, 0xae, 0x17, 0x55, 0x83, 0x94, 0x50, 0xac, 0xf6, 0x9e, 0x2a,
1397	0x50, 0xa1, 0xc2, 0x8e, 0xeb, 0x51, 0x64, 0x0d, 0xcc, 0xde, 0x73, 0xe2, 0x84, 0xcc, 0xbc, 0xff,
1398	0x13, 0x82, 0x6a, 0xf0, 0x01, 0x6d, 0x3b, 0xae, 0x4f, 0x8c, 0xd0, 0x39, 0x72, 0xdc, 0x13, 0xc7,
1399	0x38, 0xc6, 0x76, 0x48, 0x98, 0xa8, 0xc7, 0x99, 0x52, 0xae, 0xbb, 0x58, 0xf7, 0x26, 0x29, 0x6c,
1400	0xb8, 0xae, 0xbd, 0xcf, 0xa7, 0x68, 0xdf, 0x8d, 0x1c, 0xf7, 0x22, 0x3f, 0x61, 0x63, 0xb9, 0x6f,
1401	0x27, 0x00, 0x4e, 0x89, 0x20, 0x05, 0x16, 0xa3, 0xf2, 0x64, 0x11, 0x9b, 0x76, 0x68, 0x40, 0x7c,
1402	0x21, 0xa9, 0x4c, 0xe9, 0xfa, 0x99, 0xc0, 0xcd, 0xc0, 0xa7, 0x4e, 0x3b, 0x0a, 0xbd, 0x20, 0x9c,
1403	0xaa, 0x5d, 0x1f, 0x8e, 0x12, 0xdb, 0xb6, 0x7b, 0x62, 0xbc, 0x0a, 0xdd, 0x80, 0x58, 0x86, 0x43,
1404	0x4e, 0x6c, 0xea, 0x10, 0x26, 0xea, 0xda, 0x08, 0x94, 0xc2, 0x71, 0x57, 0xf8, 0xd5, 0x62, 0x37,
1405	0xf4, 0x18, 0x40, 0x44, 0x32, 0xcc, 0x17, 0xd8, 0x17, 0x52, 0x1d, 0x85, 0x68, 0x56, 0xcc, 0xaf,
1406	0xbc, 0xc0, 0x3e, 0xda, 0x82, 0x25, 0x76, 0x44, 0x3d, 0xc3, 0x26, 0x98, 0xab, 0xc2, 0xf0, 0xdd,
1407	0x93, 0x48, 0x8d, 0x99, 0xd2, 0xb5, 0x33, 0x31, 0x54, 0x27, 0xb8, 0x77, 0x37, 0x0a, 0xb1, 0xc8,
1408	0xbd, 0xb6, 0x23, 0x27, 0xcd, 0x3d, 0x61, 0x68, 0x13, 0x96, 0x22, 0x56, 0x2f, 0x71, 0xbb, 0x4d,
1409	0xac, 0x28, 0xd0, 0xf4, 0x48, 0x46, 0x8b, 0xc2, 0xe9, 0xa9, 0xf0, 0xe1, 0x71, 0xf2, 0xbf, 0x80,
1410	0x54, 0x74, 0x5c, 0xd0, 0x0a, 0xa0, 0xcd, 0xba, 0xb6, 0x53, 0xd6, 0xfb, 0x0a, 0xdd, 0x0c, 0x4c,
1411	0x56, 0x9a, 0xfb, 0xb2, 0xc4, 0x0b, 0xd2, 0xd3, 0x66, 0xbd, 0x26, 0x4f, 0xf0, 0xa7, 0xf2, 0xbe,
1412	0x56, 0x97, 0x27, 0x79, 0x69, 0x8a, 0x2a, 0x9c, 0x5a, 0x97, 0xa7, 0xf8, 0x5b, 0xa5, 0xbe, 0xbd,
1413	0xb7, 0x53, 0x53, 0xeb, 0xf2, 0x34, 0xaf, 0x4d, 0x95, 0x72, 0xa3, 0x5c, 0x51, 0xf5, 0xba, 0x26,
1414	0xa7, 0x78, 0xdd, 0x6a, 0x94, 0xb5, 0xdd, 0x3d, 0x45, 0x97, 0x67, 0x78, 0xd0, 0xba, 0x56, 0x91,
1415	0xd3, 0xf9, 0x4f, 0x21, 0xdd, 0x3d, 0x2c, 0x28, 0x0b, 0xcb, 0x4a, 0xad, 0x52, 0xaf, 0xaa, 0xb5,
1416	0xad, 0x3e, 0x0c, 0x0b, 0x00, 0x6a, 0xb3, 0x6e, 0x3c, 0x78, 0xf0, 0xc3, 0x87, 0xc6, 0x7a, 0x04,
1417	0x65, 0x4f, 0xdf, 0x7c, 0x20, 0x4f, 0xe4, 0xff, 0x20, 0x41, 0x76, 0xcf, 0xb3, 0x70, 0x40, 0xf4,
1418	0x58, 0xbf, 0x5a, 0xe8, 0x68, 0xe4, 0x55, 0x48, 0x58, 0x80, 0xf6, 0x60, 0xae, 0xab, 0x6a, 0xc3,
1419	0x0f, 0x9d, 0x58, 0x3e, 0xa5, 0x31, 0x0f, 0x41, 0x32, 0x60, 0x26, 0x38, 0x7d, 0x41, 0x8f, 0x21,
1420	0x13, 0x8a, 0x25, 0x45, 0x5f, 0x35, 0x54, 0x47, 0xa2, 0x66, 0xee, 0x60, 0x76, 0xa4, 0x41, 0x34,
1421	0x9d, 0x3f, 0xe7, 0x7f, 0x23, 0xc1, 0x8d, 0x6d, 0xb7, 0x9d, 0x08, 0xbe, 0x43, 0x18, 0xc3, 0x6d,
1422	0xc2, 0xba, 0xa8, 0x11, 0x4c, 0x25, 0x2e, 0x64, 0xf1, 0x8c, 0x4c, 0x58, 0xea, 0x31, 0xe9, 0xc4,
1423	0xf3, 0x45, 0x09, 0xcd, 0x94, 0xee, 0xbd, 0x23, 0x9d, 0x78, 0x39, 0x4d, 0x0e, 0xde, 0x36, 0xb0,
1424	0xfc, 0x37, 0x12, 0x64, 0x9b, 0x01, 0xf6, 0x83, 0x0d, 0xda, 0xde, 0xe5, 0x31, 0x9e, 0xba, 0x87,
1425	0xe7, 0xa2, 0x3a, 0x80, 0x79, 0x1a, 0x57, 0x0a, 0x83, 0x2f, 0x17, 0x23, 0xba, 0x7f, 0xc1, 0x2a,
1426	0xa3, 0xcd, 0xd1, 0x84, 0x05, 0x7d, 0x02, 0xb2, 0x68, 0xa5, 0x4c, 0x9f, 0x58, 0xc4, 0x09, 0x28,
1427	0xb6, 0x99, 0x38, 0x6e, 0x73, 0xda, 0x22, 0xb7, 0x57, 0x4e, 0xcd, 0xe8, 0x7b, 0x51, 0x89, 0xf7,
1428	0xb0, 0x8f, 0x6d, 0x9b, 0xd8, 0x94, 0x75, 0xb2, 0x69, 0x51, 0xe2, 0x17, 0x3a, 0xf8, 0x75, 0xe3,
1429	0xd4, 0x9a, 0xbf, 0x0d, 0xf2, 0x26, 0x75, 0x28, 0x7b, 0x91, 0x50, 0xc9, 0x00, 0x66, 0x5c, 0x56,
1430	0xb7, 0x2a, 0x3e, 0x6f, 0x44, 0x38, 0x94, 0xa6, 0xb8, 0xa4, 0x12, 0x0d, 0x5f, 0xec, 0xb9, 0x02,
1431	0x29, 0x0f, 0xfb, 0xc4, 0x09, 0x62, 0xdf, 0xf8, 0x0d, 0xbd, 0x82, 0x15, 0xce, 0xd5, 0x88, 0xef,
1432	0x3b, 0xab, 0xe7, 0x18, 0x6b, 0xe5, 0xf1, 0x98, 0x09, 0x1a, 0xb8, 0xf6, 0xb2, 0x35, 0xc0, 0x9a,
1433	0xff, 0x5a, 0x82, 0x5b, 0xd1, 0x39, 0x38, 0x0f, 0xf0, 0x70, 0x60, 0xd2, 0xff, 0x09, 0xd8, 0xe5,
1434	0x0e, 0xcb, 0x7d, 0xb8, 0x55, 0x25, 0x36, 0x39, 0x9f, 0xd4, 0xa0, 0xfd, 0xbb, 0x0b, 0x1f, 0x6e,
1435	0x91, 0xe0, 0x5d, 0xbd, 0x8e, 0x61, 0x75, 0x9b, 0xb2, 0x81, 0x6e, 0x6c, 0xd4, 0x9e, 0xdf, 0x00,
1436	0xf0, 0x70, 0x9b, 0x18, 0x81, 0x7b, 0x44, 0x9c, 0xf8, 0x63, 0x66, 0x96, 0x5b, 0x74, 0x6e, 0x40,
1437	0xd7, 0x40, 0xbc, 0x18, 0x8c, 0x7e, 0x41, 0x84, 0x8a, 0xa7, 0xb5, 0x34, 0x37, 0x34, 0xe9, 0x17,
1438	0x24, 0xff, 0x27, 0x09, 0x6e, 0x9d, 0xb3, 0x30, 0xf3, 0x5c, 0x87, 0x11, 0xc4, 0xe0, 0xca, 0xe0,
1439	0xcd, 0xeb, 0x76, 0x77, 0x97, 0xda, 0xbd, 0x0f, 0x06, 0xed, 0x1e, 0xe3, 0xcd, 0x93, 0x43, 0x5e,
1440	0x07, 0xc6, 0x19, 0x6e, 0xf3, 0xdc, 0xdc, 0xe8, 0xf2, 0xcb, 0xff, 0x7d, 0x12, 0x96, 0x07, 0xc5,
1441	0xed, 0xe5, 0xf9, 0x83, 0x44, 0xdd, 0xd0, 0x20, 0x93, 0x60, 0x12, 0x6b, 0x6f, 0xfd, 0x9d, 0xd1,
1442	0x6b, 0x70, 0x8a, 0x19, 0x3d, 0x84, 0xab, 0xc9, 0x5a, 0x6f, 0x78, 0xe1, 0x21, 0x0b, 0x0f, 0x8d,
1443	0xc0, 0xf5, 0xa8, 0x99, 0x9d, 0x17, 0x8b, 0xaf, 0x24, 0x8a, 0x78, 0x43, 0x0c, 0xeb, 0x7c, 0x14,
1444	0x95, 0x61, 0x91, 0x7b, 0xf0, 0x0f, 0x61, 0xc3, 0x6d, 0xb5, 0x18, 0x09, 0xb2, 0xb2, 0x80, 0x74,
1445	0xf5, 0x8c, 0x4c, 0xab, 0xf1, 0xb7, 0xb8, 0x36, 0xef, 0x87, 0x8e, 0x4e, 0x3b, 0xa4, 0x2e, 0xe6,
1446	0xa3, 0x8f, 0x60, 0x9e, 0x85, 0x1e, 0x2f, 0x5e, 0x06, 0xe9, 0x60, 0x6a, 0x67, 0x57, 0xc4, 0x8a,
1447	0x73, 0xb1, 0x51, 0xe1, 0x36, 0x5e, 0xa5, 0x18, 0xf1, 0x8f, 0xa9, 0x49, 0x0c, 0x6c, 0x9a, 0x6e,
1448	0xe8, 0x04, 0x71, 0x2e, 0x17, 0x62, 0x73, 0x39, 0xb2, 0xa2, 0x1c, 0xa4, 0x2d, 0xca, 0x78, 0x8b,
1449	0x67, 0x89, 0x3b, 0x3d, 0xad, 0xf5, 0xde, 0xd1, 0x67, 0x70, 0xbd, 0xc7, 0xd3, 0x74, 0x9d, 0x16,
1450	0x6d, 0xbf, 0x4d, 0x75, 0x4e, 0x44, 0xec, 0xe5, 0xa2, 0x22, 0xa6, 0x24, 0xd9, 0xde, 0x85, 0x95,
1451	0x18, 0x15, 0xb1, 0x0c, 0xdb, 0x35, 0xa3, 0x8f, 0x6a, 0x6a, 0xb1, 0xec, 0x95, 0xe8, 0x0b, 0xb5,
1452	0x37, 0xba, 0x1d, 0x0f, 0xaa, 0x16, 0xbb, 0xf3, 0x39, 0xc8, 0xfd, 0x5f, 0x98, 0xe8, 0x16, 0xdc,
1453	0x78, 0xa6, 0xa9, 0xba, 0x62, 0x54, 0xd5, 0x66, 0xa3, 0xde, 0x54, 0x75, 0xb5, 0x5e, 0xeb, 0xbb,
1454	0xb8, 0x11, 0x2c, 0x44, 0x53, 0x74, 0x6d, 0xaf, 0x56, 0xe1, 0x1f, 0x36, 0x12, 0x92, 0x61, 0x2e,
1455	0xb2, 0x95, 0x1b, 0x0d, 0xa5, 0x56, 0x95, 0x27, 0x4a, 0xbf, 0x95, 0x61, 0xe9, 0x74, 0x5b, 0x9b,
1456	0x51, 0x32, 0xd0, 0xbf, 0x25, 0x58, 0x3a, 0x73, 0xb5, 0xa3, 0xcf, 0xc6, 0x94, 0xc9, 0xb0, 0xa6,
1457	0x20, 0x77, 0x81, 0xeb, 0x3f, 0xff, 0xb3, 0x5f, 0x7e, 0xfd, 0xaf, 0x5f, 0x4d, 0x3c, 0x2b, 0x6d,
1458	0x15, 0x8f, 0xd7, 0x8b, 0x3f, 0x4f, 0xea, 0xac, 0xc0, 0x05, 0xfd, 0x23, 0xcf, 0x77, 0x5f, 0x12,
1459	0x33, 0x60, 0xc5, 0x3b, 0xc5, 0x6e, 0x46, 0xf9, 0xf3, 0xdb, 0x9b, 0xc0, 0x2d, 0x7e, 0xc8, 0x07,
1460	0xbe, 0x7c, 0xf4, 0x56, 0x5f, 0x82, 0xfe, 0x26, 0xc1, 0xca, 0xe0, 0x9e, 0x00, 0x8d, 0xdb, 0xb1,
1461	0x9f, 0xdb, 0x52, 0xe4, 0x56, 0xce, 0x08, 0x59, 0xe9, 0x78, 0xc1, 0x9b, 0xfc, 0xae, 0xe0, 0xf5,
1462	0x93, 0xfc, 0xa6, 0xe0, 0x75, 0x31, 0x2a, 0xb6, 0xdb, 0xee, 0x2e, 0xf7, 0x48, 0xba, 0x83, 0xfe,
1463	0x2a, 0xc1, 0xd2, 0x99, 0x26, 0x62, 0xec, 0x5d, 0x1b, 0xd6, 0x7e, 0x0c, 0x65, 0xf0, 0x4c, 0x30,
1464	0xd8, 0xcd, 0x6f, 0x5f, 0x82, 0x01, 0xeb, 0x5f, 0x94, 0xf3, 0xf8, 0xbd, 0x04, 0xb3, 0xbd, 0x56,
1465	0x01, 0x8d, 0xdb, 0xd2, 0xf4, 0x37, 0x17, 0x43, 0x71, 0xd7, 0x05, 0x6e, 0x35, 0x5f, 0xbd, 0x04,
1466	0xee, 0x56, 0x77, 0x31, 0x8e, 0xf7, 0xbf, 0x12, 0xe4, 0x86, 0x77, 0x2c, 0x68, 0xdc, 0x9f, 0x6f,
1467	0x46, 0x36, 0x3d, 0xb9, 0xcb, 0xdc, 0x32, 0xf9, 0xe7, 0x82, 0x76, 0x33, 0xff, 0x50, 0xd0, 0x8e,
1468	0xae, 0xce, 0x21, 0xc4, 0xbf, 0x2c, 0x0e, 0xbc, 0x91, 0x1e, 0x0d, 0xe9, 0x60, 0xd0, 0xaf, 0x27,
1469	0x20, 0x37, 0xbc, 0x03, 0x1a, 0x3b, 0x01, 0x23, 0x9b, 0xa8, 0xcb, 0x25, 0xc0, 0x13, 0x09, 0x78,
1470	0x59, 0x6a, 0x88, 0x04, 0x0c, 0xa6, 0x72, 0x6e, 0x4d, 0x19, 0x98, 0x14, 0xae, 0x86, 0x61, 0x79,
1471	0xf9, 0xb3, 0x04, 0xb9, 0xe1, 0x4d, 0xd4, 0xd8, 0x79, 0x19, 0xd9, 0x87, 0x0d, 0x95, 0x7a, 0x59,
1472	0x50, 0x7e, 0x7c, 0xe7, 0xe1, 0x48, 0xa9, 0x0f, 0xe3, 0x86, 0xbe, 0x91, 0xe0, 0xca, 0x90, 0x7e,
1473	0x0e, 0x29, 0x63, 0x12, 0x38, 0xbf, 0x1f, 0xbc, 0xdc, 0xae, 0xc6, 0x14, 0xd1, 0x25, 0x28, 0x7e,
1474	0x2b, 0xc1, 0xd5, 0xa1, 0x3d, 0x20, 0x1a, 0xf7, 0xa7, 0xea, 0x51, 0xed, 0x6b, 0xee, 0xc9, 0xe5,
1475	0x03, 0x45, 0xed, 0x68, 0x1f, 0xe7, 0x8b, 0x1c, 0xe5, 0xdc, 0xed, 0xaf, 0xca, 0x1f, 0x75, 0x11,
1476	0xbc, 0x05, 0x20, 0xc2, 0x88, 0x3d, 0xca, 0x0a, 0xa6, 0xdb, 0xd9, 0xf8, 0xa7, 0x04, 0x9f, 0x98,
1477	0x6e, 0x67, 0x3c, 0xe8, 0x1b, 0x8b, 0xa7, 0xb8, 0x1b, 0x5c, 0x89, 0x0d, 0xe9, 0xa7, 0xbb, 0xb1,
1478	0x67, 0xdb, 0xb5, 0xb1, 0xd3, 0x2e, 0xb8, 0x7e, 0xbb, 0xd8, 0x26, 0x8e, 0xd0, 0x69, 0xf1, 0x74,
1479	0xad, 0x11, 0x7f, 0xd4, 0x3c, 0x4e, 0xbe, 0xff, 0x6e, 0xe2, 0xe3, 0xad, 0x28, 0x66, 0x45, 0xa0,
1480	0xe9, 0x5e, 0x1f, 0x42, 0x1f, 0xdd, 0x9b, 0xb8, 0xb0, 0xbf, 0xfe, 0x55, 0x77, 0xde, 0x81, 0x98,
1481	0x77, 0xd0, 0x9d, 0x77, 0x90, 0x9c, 0x77, 0xb0, 0xbf, 0x7e, 0x98, 0x12, 0x68, 0x7e, 0xf0, 0xbf,
1482	0x00, 0x00, 0x00, 0xff, 0xff, 0x86, 0x02, 0xf2, 0x9b, 0xec, 0x1a, 0x00, 0x00,
1483}
1484
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
1492
// DataSourceServiceClient is the client API for DataSourceService service.
// All methods of this service are unary RPCs (the service descriptor below
// registers no streaming methods).
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DataSourceServiceClient interface {
	// Update a transfer run. If successful, resets
	// data_source.update_deadline_seconds timer.
	UpdateTransferRun(ctx context.Context, in *UpdateTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error)
	// Log messages for a transfer run. If successful (at least 1 message), resets
	// data_source.update_deadline_seconds timer.
	LogTransferRunMessages(ctx context.Context, in *LogTransferRunMessagesRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Notify the Data Transfer Service that data is ready for loading.
	// The Data Transfer Service will start and monitor multiple BigQuery Load
	// jobs for a transfer run. Monitored jobs will be automatically retried
	// and produce log messages when starting and finishing a job.
	// Can be called multiple times for the same transfer run.
	StartBigQueryJobs(ctx context.Context, in *StartBigQueryJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Notify the Data Transfer Service that the data source is done processing
	// the run. No more status updates or requests to start/monitor jobs will be
	// accepted. The run will be finalized by the Data Transfer Service when all
	// monitored jobs are completed.
	// Does not need to be called if the run is set to FAILED.
	FinishRun(ctx context.Context, in *FinishRunRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Creates a data source definition.  Calling this method will automatically
	// use your credentials to create the following Google Cloud resources in
	// YOUR Google Cloud project.
	// 1. OAuth client
	// 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g.,
	// projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
	// The field data_source.client_id should be left empty in the input request,
	// as the API will create a new OAuth client on behalf of the caller. On the
	// other hand data_source.scopes usually need to be set when there are OAuth
	// scopes that need to be granted by end users.
	// 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin
	// Operations. This also applies to update and delete data source definition.
	CreateDataSourceDefinition(ctx context.Context, in *CreateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Updates an existing data source definition. If changing
	// supported_location_ids, triggers same effects as mentioned in "Create a
	// data source definition."
	UpdateDataSourceDefinition(ctx context.Context, in *UpdateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Deletes a data source definition, all of the transfer configs associated
	// with this data source definition (if any) must be deleted first by the user
	// in ALL regions, in order to delete the data source definition.
	// This method is primarily meant for deleting data sources created during
	// testing stage.
	// If the data source is referenced by transfer configs in the region
	// specified in the request URL, the method will fail immediately. If in the
	// current region (e.g., US) it's not used by any transfer configs, but in
	// another region (e.g., EU) it is, then although the method will succeed in
	// region US, but it will fail when the deletion operation is replicated to
	// region EU. And eventually, the system will replicate the data source
	// definition back from EU to US, in order to bring all regions to
	// consistency. The final effect is that the data source appears to be
	// 'undeleted' in the US region.
	DeleteDataSourceDefinition(ctx context.Context, in *DeleteDataSourceDefinitionRequest, opts ...grpc.CallOption) (*empty.Empty, error)
	// Retrieves an existing data source definition.
	GetDataSourceDefinition(ctx context.Context, in *GetDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
	// Lists supported data source definitions.
	ListDataSourceDefinitions(ctx context.Context, in *ListDataSourceDefinitionsRequest, opts ...grpc.CallOption) (*ListDataSourceDefinitionsResponse, error)
}
1552
// dataSourceServiceClient implements DataSourceServiceClient by invoking each
// RPC over the wrapped gRPC client connection.
type dataSourceServiceClient struct {
	cc grpc.ClientConnInterface
}
1556
1557func NewDataSourceServiceClient(cc grpc.ClientConnInterface) DataSourceServiceClient {
1558	return &dataSourceServiceClient{cc}
1559}
1560
1561func (c *dataSourceServiceClient) UpdateTransferRun(ctx context.Context, in *UpdateTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error) {
1562	out := new(TransferRun)
1563	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun", in, out, opts...)
1564	if err != nil {
1565		return nil, err
1566	}
1567	return out, nil
1568}
1569
1570func (c *dataSourceServiceClient) LogTransferRunMessages(ctx context.Context, in *LogTransferRunMessagesRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
1571	out := new(empty.Empty)
1572	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages", in, out, opts...)
1573	if err != nil {
1574		return nil, err
1575	}
1576	return out, nil
1577}
1578
1579func (c *dataSourceServiceClient) StartBigQueryJobs(ctx context.Context, in *StartBigQueryJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
1580	out := new(empty.Empty)
1581	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs", in, out, opts...)
1582	if err != nil {
1583		return nil, err
1584	}
1585	return out, nil
1586}
1587
1588func (c *dataSourceServiceClient) FinishRun(ctx context.Context, in *FinishRunRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
1589	out := new(empty.Empty)
1590	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun", in, out, opts...)
1591	if err != nil {
1592		return nil, err
1593	}
1594	return out, nil
1595}
1596
1597func (c *dataSourceServiceClient) CreateDataSourceDefinition(ctx context.Context, in *CreateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
1598	out := new(DataSourceDefinition)
1599	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition", in, out, opts...)
1600	if err != nil {
1601		return nil, err
1602	}
1603	return out, nil
1604}
1605
1606func (c *dataSourceServiceClient) UpdateDataSourceDefinition(ctx context.Context, in *UpdateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
1607	out := new(DataSourceDefinition)
1608	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition", in, out, opts...)
1609	if err != nil {
1610		return nil, err
1611	}
1612	return out, nil
1613}
1614
1615func (c *dataSourceServiceClient) DeleteDataSourceDefinition(ctx context.Context, in *DeleteDataSourceDefinitionRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
1616	out := new(empty.Empty)
1617	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition", in, out, opts...)
1618	if err != nil {
1619		return nil, err
1620	}
1621	return out, nil
1622}
1623
1624func (c *dataSourceServiceClient) GetDataSourceDefinition(ctx context.Context, in *GetDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
1625	out := new(DataSourceDefinition)
1626	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition", in, out, opts...)
1627	if err != nil {
1628		return nil, err
1629	}
1630	return out, nil
1631}
1632
1633func (c *dataSourceServiceClient) ListDataSourceDefinitions(ctx context.Context, in *ListDataSourceDefinitionsRequest, opts ...grpc.CallOption) (*ListDataSourceDefinitionsResponse, error) {
1634	out := new(ListDataSourceDefinitionsResponse)
1635	err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions", in, out, opts...)
1636	if err != nil {
1637		return nil, err
1638	}
1639	return out, nil
1640}
1641
// DataSourceServiceServer is the server API for DataSourceService service.
// Implementations may embed UnimplementedDataSourceServiceServer to obtain
// default Unimplemented responses for methods they do not provide.
type DataSourceServiceServer interface {
	// Update a transfer run. If successful, resets
	// data_source.update_deadline_seconds timer.
	UpdateTransferRun(context.Context, *UpdateTransferRunRequest) (*TransferRun, error)
	// Log messages for a transfer run. If successful (at least 1 message), resets
	// data_source.update_deadline_seconds timer.
	LogTransferRunMessages(context.Context, *LogTransferRunMessagesRequest) (*empty.Empty, error)
	// Notify the Data Transfer Service that data is ready for loading.
	// The Data Transfer Service will start and monitor multiple BigQuery Load
	// jobs for a transfer run. Monitored jobs will be automatically retried
	// and produce log messages when starting and finishing a job.
	// Can be called multiple times for the same transfer run.
	StartBigQueryJobs(context.Context, *StartBigQueryJobsRequest) (*empty.Empty, error)
	// Notify the Data Transfer Service that the data source is done processing
	// the run. No more status updates or requests to start/monitor jobs will be
	// accepted. The run will be finalized by the Data Transfer Service when all
	// monitored jobs are completed.
	// Does not need to be called if the run is set to FAILED.
	FinishRun(context.Context, *FinishRunRequest) (*empty.Empty, error)
	// Creates a data source definition.  Calling this method will automatically
	// use your credentials to create the following Google Cloud resources in
	// YOUR Google Cloud project.
	// 1. OAuth client
	// 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g.,
	// projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
	// The field data_source.client_id should be left empty in the input request,
	// as the API will create a new OAuth client on behalf of the caller. On the
	// other hand data_source.scopes usually need to be set when there are OAuth
	// scopes that need to be granted by end users.
	// 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin
	// Operations. This also applies to update and delete data source definition.
	CreateDataSourceDefinition(context.Context, *CreateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Updates an existing data source definition. If changing
	// supported_location_ids, triggers same effects as mentioned in "Create a
	// data source definition."
	UpdateDataSourceDefinition(context.Context, *UpdateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Deletes a data source definition, all of the transfer configs associated
	// with this data source definition (if any) must be deleted first by the user
	// in ALL regions, in order to delete the data source definition.
	// This method is primarily meant for deleting data sources created during
	// testing stage.
	// If the data source is referenced by transfer configs in the region
	// specified in the request URL, the method will fail immediately. If in the
	// current region (e.g., US) it's not used by any transfer configs, but in
	// another region (e.g., EU) it is, then although the method will succeed in
	// region US, but it will fail when the deletion operation is replicated to
	// region EU. And eventually, the system will replicate the data source
	// definition back from EU to US, in order to bring all regions to
	// consistency. The final effect is that the data source appears to be
	// 'undeleted' in the US region.
	DeleteDataSourceDefinition(context.Context, *DeleteDataSourceDefinitionRequest) (*empty.Empty, error)
	// Retrieves an existing data source definition.
	GetDataSourceDefinition(context.Context, *GetDataSourceDefinitionRequest) (*DataSourceDefinition, error)
	// Lists supported data source definitions.
	ListDataSourceDefinitions(context.Context, *ListDataSourceDefinitionsRequest) (*ListDataSourceDefinitionsResponse, error)
}
1699
// UnimplementedDataSourceServiceServer can be embedded to have forward compatible implementations.
// Every method returns a codes.Unimplemented gRPC status error.
type UnimplementedDataSourceServiceServer struct {
}
1703
1704func (*UnimplementedDataSourceServiceServer) UpdateTransferRun(ctx context.Context, req *UpdateTransferRunRequest) (*TransferRun, error) {
1705	return nil, status.Errorf(codes.Unimplemented, "method UpdateTransferRun not implemented")
1706}
1707func (*UnimplementedDataSourceServiceServer) LogTransferRunMessages(ctx context.Context, req *LogTransferRunMessagesRequest) (*empty.Empty, error) {
1708	return nil, status.Errorf(codes.Unimplemented, "method LogTransferRunMessages not implemented")
1709}
1710func (*UnimplementedDataSourceServiceServer) StartBigQueryJobs(ctx context.Context, req *StartBigQueryJobsRequest) (*empty.Empty, error) {
1711	return nil, status.Errorf(codes.Unimplemented, "method StartBigQueryJobs not implemented")
1712}
1713func (*UnimplementedDataSourceServiceServer) FinishRun(ctx context.Context, req *FinishRunRequest) (*empty.Empty, error) {
1714	return nil, status.Errorf(codes.Unimplemented, "method FinishRun not implemented")
1715}
1716func (*UnimplementedDataSourceServiceServer) CreateDataSourceDefinition(ctx context.Context, req *CreateDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
1717	return nil, status.Errorf(codes.Unimplemented, "method CreateDataSourceDefinition not implemented")
1718}
1719func (*UnimplementedDataSourceServiceServer) UpdateDataSourceDefinition(ctx context.Context, req *UpdateDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
1720	return nil, status.Errorf(codes.Unimplemented, "method UpdateDataSourceDefinition not implemented")
1721}
1722func (*UnimplementedDataSourceServiceServer) DeleteDataSourceDefinition(ctx context.Context, req *DeleteDataSourceDefinitionRequest) (*empty.Empty, error) {
1723	return nil, status.Errorf(codes.Unimplemented, "method DeleteDataSourceDefinition not implemented")
1724}
1725func (*UnimplementedDataSourceServiceServer) GetDataSourceDefinition(ctx context.Context, req *GetDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
1726	return nil, status.Errorf(codes.Unimplemented, "method GetDataSourceDefinition not implemented")
1727}
1728func (*UnimplementedDataSourceServiceServer) ListDataSourceDefinitions(ctx context.Context, req *ListDataSourceDefinitionsRequest) (*ListDataSourceDefinitionsResponse, error) {
1729	return nil, status.Errorf(codes.Unimplemented, "method ListDataSourceDefinitions not implemented")
1730}
1731
// RegisterDataSourceServiceServer registers srv with the gRPC server s so that
// incoming DataSourceService RPCs are dispatched to srv's methods.
func RegisterDataSourceServiceServer(s *grpc.Server, srv DataSourceServiceServer) {
	s.RegisterService(&_DataSourceService_serviceDesc, srv)
}
1735
1736func _DataSourceService_UpdateTransferRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1737	in := new(UpdateTransferRunRequest)
1738	if err := dec(in); err != nil {
1739		return nil, err
1740	}
1741	if interceptor == nil {
1742		return srv.(DataSourceServiceServer).UpdateTransferRun(ctx, in)
1743	}
1744	info := &grpc.UnaryServerInfo{
1745		Server:     srv,
1746		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun",
1747	}
1748	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1749		return srv.(DataSourceServiceServer).UpdateTransferRun(ctx, req.(*UpdateTransferRunRequest))
1750	}
1751	return interceptor(ctx, in, info, handler)
1752}
1753
1754func _DataSourceService_LogTransferRunMessages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1755	in := new(LogTransferRunMessagesRequest)
1756	if err := dec(in); err != nil {
1757		return nil, err
1758	}
1759	if interceptor == nil {
1760		return srv.(DataSourceServiceServer).LogTransferRunMessages(ctx, in)
1761	}
1762	info := &grpc.UnaryServerInfo{
1763		Server:     srv,
1764		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages",
1765	}
1766	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1767		return srv.(DataSourceServiceServer).LogTransferRunMessages(ctx, req.(*LogTransferRunMessagesRequest))
1768	}
1769	return interceptor(ctx, in, info, handler)
1770}
1771
1772func _DataSourceService_StartBigQueryJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1773	in := new(StartBigQueryJobsRequest)
1774	if err := dec(in); err != nil {
1775		return nil, err
1776	}
1777	if interceptor == nil {
1778		return srv.(DataSourceServiceServer).StartBigQueryJobs(ctx, in)
1779	}
1780	info := &grpc.UnaryServerInfo{
1781		Server:     srv,
1782		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs",
1783	}
1784	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1785		return srv.(DataSourceServiceServer).StartBigQueryJobs(ctx, req.(*StartBigQueryJobsRequest))
1786	}
1787	return interceptor(ctx, in, info, handler)
1788}
1789
1790func _DataSourceService_FinishRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1791	in := new(FinishRunRequest)
1792	if err := dec(in); err != nil {
1793		return nil, err
1794	}
1795	if interceptor == nil {
1796		return srv.(DataSourceServiceServer).FinishRun(ctx, in)
1797	}
1798	info := &grpc.UnaryServerInfo{
1799		Server:     srv,
1800		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun",
1801	}
1802	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1803		return srv.(DataSourceServiceServer).FinishRun(ctx, req.(*FinishRunRequest))
1804	}
1805	return interceptor(ctx, in, info, handler)
1806}
1807
1808func _DataSourceService_CreateDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1809	in := new(CreateDataSourceDefinitionRequest)
1810	if err := dec(in); err != nil {
1811		return nil, err
1812	}
1813	if interceptor == nil {
1814		return srv.(DataSourceServiceServer).CreateDataSourceDefinition(ctx, in)
1815	}
1816	info := &grpc.UnaryServerInfo{
1817		Server:     srv,
1818		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition",
1819	}
1820	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1821		return srv.(DataSourceServiceServer).CreateDataSourceDefinition(ctx, req.(*CreateDataSourceDefinitionRequest))
1822	}
1823	return interceptor(ctx, in, info, handler)
1824}
1825
1826func _DataSourceService_UpdateDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1827	in := new(UpdateDataSourceDefinitionRequest)
1828	if err := dec(in); err != nil {
1829		return nil, err
1830	}
1831	if interceptor == nil {
1832		return srv.(DataSourceServiceServer).UpdateDataSourceDefinition(ctx, in)
1833	}
1834	info := &grpc.UnaryServerInfo{
1835		Server:     srv,
1836		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition",
1837	}
1838	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1839		return srv.(DataSourceServiceServer).UpdateDataSourceDefinition(ctx, req.(*UpdateDataSourceDefinitionRequest))
1840	}
1841	return interceptor(ctx, in, info, handler)
1842}
1843
1844func _DataSourceService_DeleteDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1845	in := new(DeleteDataSourceDefinitionRequest)
1846	if err := dec(in); err != nil {
1847		return nil, err
1848	}
1849	if interceptor == nil {
1850		return srv.(DataSourceServiceServer).DeleteDataSourceDefinition(ctx, in)
1851	}
1852	info := &grpc.UnaryServerInfo{
1853		Server:     srv,
1854		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition",
1855	}
1856	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1857		return srv.(DataSourceServiceServer).DeleteDataSourceDefinition(ctx, req.(*DeleteDataSourceDefinitionRequest))
1858	}
1859	return interceptor(ctx, in, info, handler)
1860}
1861
1862func _DataSourceService_GetDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1863	in := new(GetDataSourceDefinitionRequest)
1864	if err := dec(in); err != nil {
1865		return nil, err
1866	}
1867	if interceptor == nil {
1868		return srv.(DataSourceServiceServer).GetDataSourceDefinition(ctx, in)
1869	}
1870	info := &grpc.UnaryServerInfo{
1871		Server:     srv,
1872		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition",
1873	}
1874	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1875		return srv.(DataSourceServiceServer).GetDataSourceDefinition(ctx, req.(*GetDataSourceDefinitionRequest))
1876	}
1877	return interceptor(ctx, in, info, handler)
1878}
1879
1880func _DataSourceService_ListDataSourceDefinitions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
1881	in := new(ListDataSourceDefinitionsRequest)
1882	if err := dec(in); err != nil {
1883		return nil, err
1884	}
1885	if interceptor == nil {
1886		return srv.(DataSourceServiceServer).ListDataSourceDefinitions(ctx, in)
1887	}
1888	info := &grpc.UnaryServerInfo{
1889		Server:     srv,
1890		FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions",
1891	}
1892	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
1893		return srv.(DataSourceServiceServer).ListDataSourceDefinitions(ctx, req.(*ListDataSourceDefinitionsRequest))
1894	}
1895	return interceptor(ctx, in, info, handler)
1896}
1897
// _DataSourceService_serviceDesc describes the DataSourceService service for
// grpc.Server registration: its fully-qualified name, the server interface it
// dispatches to, and the handler for each unary method. The service has no
// streaming methods.
var _DataSourceService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.bigquery.datatransfer.v1.DataSourceService",
	HandlerType: (*DataSourceServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "UpdateTransferRun",
			Handler:    _DataSourceService_UpdateTransferRun_Handler,
		},
		{
			MethodName: "LogTransferRunMessages",
			Handler:    _DataSourceService_LogTransferRunMessages_Handler,
		},
		{
			MethodName: "StartBigQueryJobs",
			Handler:    _DataSourceService_StartBigQueryJobs_Handler,
		},
		{
			MethodName: "FinishRun",
			Handler:    _DataSourceService_FinishRun_Handler,
		},
		{
			MethodName: "CreateDataSourceDefinition",
			Handler:    _DataSourceService_CreateDataSourceDefinition_Handler,
		},
		{
			MethodName: "UpdateDataSourceDefinition",
			Handler:    _DataSourceService_UpdateDataSourceDefinition_Handler,
		},
		{
			MethodName: "DeleteDataSourceDefinition",
			Handler:    _DataSourceService_DeleteDataSourceDefinition_Handler,
		},
		{
			MethodName: "GetDataSourceDefinition",
			Handler:    _DataSourceService_GetDataSourceDefinition_Handler,
		},
		{
			MethodName: "ListDataSourceDefinitions",
			Handler:    _DataSourceService_ListDataSourceDefinitions_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "google/cloud/bigquery/datatransfer/v1/datasource.proto",
}
1942