1// Code generated by protoc-gen-go. DO NOT EDIT.
2// source: google/cloud/dataproc/v1beta2/jobs.proto
3
4package dataproc
5
6import (
7	context "context"
8	fmt "fmt"
9	math "math"
10
11	proto "github.com/golang/protobuf/proto"
12	empty "github.com/golang/protobuf/ptypes/empty"
13	timestamp "github.com/golang/protobuf/ptypes/timestamp"
14	_ "google.golang.org/genproto/googleapis/api/annotations"
15	field_mask "google.golang.org/genproto/protobuf/field_mask"
16	grpc "google.golang.org/grpc"
17)
18
19// Reference imports to suppress errors if they are not otherwise used.
20var _ = proto.Marshal
21var _ = fmt.Errorf
22var _ = math.Inf
23
24// This is a compile-time assertion to ensure that this generated file
25// is compatible with the proto package it is being compiled against.
26// A compilation error at this line likely means your copy of the
27// proto package needs to be updated.
28const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
29
30// The Log4j level for job execution. When running an
31// [Apache Hive](http://hive.apache.org/) job, Cloud
32// Dataproc configures the Hive client to an equivalent verbosity level.
33type LoggingConfig_Level int32
34
35const (
36	// Level is unspecified. Use default level for log4j.
37	LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0
38	// Use ALL level for log4j.
39	LoggingConfig_ALL LoggingConfig_Level = 1
40	// Use TRACE level for log4j.
41	LoggingConfig_TRACE LoggingConfig_Level = 2
42	// Use DEBUG level for log4j.
43	LoggingConfig_DEBUG LoggingConfig_Level = 3
44	// Use INFO level for log4j.
45	LoggingConfig_INFO LoggingConfig_Level = 4
46	// Use WARN level for log4j.
47	LoggingConfig_WARN LoggingConfig_Level = 5
48	// Use ERROR level for log4j.
49	LoggingConfig_ERROR LoggingConfig_Level = 6
50	// Use FATAL level for log4j.
51	LoggingConfig_FATAL LoggingConfig_Level = 7
52	// Turn off log4j.
53	LoggingConfig_OFF LoggingConfig_Level = 8
54)
55
56var LoggingConfig_Level_name = map[int32]string{
57	0: "LEVEL_UNSPECIFIED",
58	1: "ALL",
59	2: "TRACE",
60	3: "DEBUG",
61	4: "INFO",
62	5: "WARN",
63	6: "ERROR",
64	7: "FATAL",
65	8: "OFF",
66}
67
68var LoggingConfig_Level_value = map[string]int32{
69	"LEVEL_UNSPECIFIED": 0,
70	"ALL":               1,
71	"TRACE":             2,
72	"DEBUG":             3,
73	"INFO":              4,
74	"WARN":              5,
75	"ERROR":             6,
76	"FATAL":             7,
77	"OFF":               8,
78}
79
80func (x LoggingConfig_Level) String() string {
81	return proto.EnumName(LoggingConfig_Level_name, int32(x))
82}
83
84func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) {
85	return fileDescriptor_20fb118582e1d7de, []int{0, 0}
86}
87
88// The job state.
89type JobStatus_State int32
90
91const (
92	// The job state is unknown.
93	JobStatus_STATE_UNSPECIFIED JobStatus_State = 0
94	// The job is pending; it has been submitted, but is not yet running.
95	JobStatus_PENDING JobStatus_State = 1
96	// Job has been received by the service and completed initial setup;
97	// it will soon be submitted to the cluster.
98	JobStatus_SETUP_DONE JobStatus_State = 8
99	// The job is running on the cluster.
100	JobStatus_RUNNING JobStatus_State = 2
101	// A CancelJob request has been received, but is pending.
102	JobStatus_CANCEL_PENDING JobStatus_State = 3
103	// Transient in-flight resources have been canceled, and the request to
104	// cancel the running job has been issued to the cluster.
105	JobStatus_CANCEL_STARTED JobStatus_State = 7
106	// The job cancellation was successful.
107	JobStatus_CANCELLED JobStatus_State = 4
108	// The job has completed successfully.
109	JobStatus_DONE JobStatus_State = 5
110	// The job has completed, but encountered an error.
111	JobStatus_ERROR JobStatus_State = 6
112	// Job attempt has failed. The detail field contains failure details for
113	// this attempt.
114	//
115	// Applies to restartable jobs only.
116	JobStatus_ATTEMPT_FAILURE JobStatus_State = 9
117)
118
119var JobStatus_State_name = map[int32]string{
120	0: "STATE_UNSPECIFIED",
121	1: "PENDING",
122	8: "SETUP_DONE",
123	2: "RUNNING",
124	3: "CANCEL_PENDING",
125	7: "CANCEL_STARTED",
126	4: "CANCELLED",
127	5: "DONE",
128	6: "ERROR",
129	9: "ATTEMPT_FAILURE",
130}
131
132var JobStatus_State_value = map[string]int32{
133	"STATE_UNSPECIFIED": 0,
134	"PENDING":           1,
135	"SETUP_DONE":        8,
136	"RUNNING":           2,
137	"CANCEL_PENDING":    3,
138	"CANCEL_STARTED":    7,
139	"CANCELLED":         4,
140	"DONE":              5,
141	"ERROR":             6,
142	"ATTEMPT_FAILURE":   9,
143}
144
145func (x JobStatus_State) String() string {
146	return proto.EnumName(JobStatus_State_name, int32(x))
147}
148
149func (JobStatus_State) EnumDescriptor() ([]byte, []int) {
150	return fileDescriptor_20fb118582e1d7de, []int{10, 0}
151}
152
153// The job substate.
154type JobStatus_Substate int32
155
156const (
157	// The job substate is unknown.
158	JobStatus_UNSPECIFIED JobStatus_Substate = 0
159	// The Job is submitted to the agent.
160	//
161	// Applies to RUNNING state.
162	JobStatus_SUBMITTED JobStatus_Substate = 1
163	// The Job has been received and is awaiting execution (it may be waiting
164	// for a condition to be met). See the "details" field for the reason for
165	// the delay.
166	//
167	// Applies to RUNNING state.
168	JobStatus_QUEUED JobStatus_Substate = 2
169	// The agent-reported status is out of date, which may be caused by a
170	// loss of communication between the agent and Cloud Dataproc. If the
171	// agent does not send a timely update, the job will fail.
172	//
173	// Applies to RUNNING state.
174	JobStatus_STALE_STATUS JobStatus_Substate = 3
175)
176
177var JobStatus_Substate_name = map[int32]string{
178	0: "UNSPECIFIED",
179	1: "SUBMITTED",
180	2: "QUEUED",
181	3: "STALE_STATUS",
182}
183
184var JobStatus_Substate_value = map[string]int32{
185	"UNSPECIFIED":  0,
186	"SUBMITTED":    1,
187	"QUEUED":       2,
188	"STALE_STATUS": 3,
189}
190
191func (x JobStatus_Substate) String() string {
192	return proto.EnumName(JobStatus_Substate_name, int32(x))
193}
194
195func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) {
196	return fileDescriptor_20fb118582e1d7de, []int{10, 1}
197}
198
199// The application state, corresponding to
200// <code>YarnProtos.YarnApplicationStateProto</code>.
201type YarnApplication_State int32
202
203const (
204	// Status is unspecified.
205	YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0
206	// Status is NEW.
207	YarnApplication_NEW YarnApplication_State = 1
208	// Status is NEW_SAVING.
209	YarnApplication_NEW_SAVING YarnApplication_State = 2
210	// Status is SUBMITTED.
211	YarnApplication_SUBMITTED YarnApplication_State = 3
212	// Status is ACCEPTED.
213	YarnApplication_ACCEPTED YarnApplication_State = 4
214	// Status is RUNNING.
215	YarnApplication_RUNNING YarnApplication_State = 5
216	// Status is FINISHED.
217	YarnApplication_FINISHED YarnApplication_State = 6
218	// Status is FAILED.
219	YarnApplication_FAILED YarnApplication_State = 7
220	// Status is KILLED.
221	YarnApplication_KILLED YarnApplication_State = 8
222)
223
224var YarnApplication_State_name = map[int32]string{
225	0: "STATE_UNSPECIFIED",
226	1: "NEW",
227	2: "NEW_SAVING",
228	3: "SUBMITTED",
229	4: "ACCEPTED",
230	5: "RUNNING",
231	6: "FINISHED",
232	7: "FAILED",
233	8: "KILLED",
234}
235
236var YarnApplication_State_value = map[string]int32{
237	"STATE_UNSPECIFIED": 0,
238	"NEW":               1,
239	"NEW_SAVING":        2,
240	"SUBMITTED":         3,
241	"ACCEPTED":          4,
242	"RUNNING":           5,
243	"FINISHED":          6,
244	"FAILED":            7,
245	"KILLED":            8,
246}
247
248func (x YarnApplication_State) String() string {
249	return proto.EnumName(YarnApplication_State_name, int32(x))
250}
251
252func (YarnApplication_State) EnumDescriptor() ([]byte, []int) {
253	return fileDescriptor_20fb118582e1d7de, []int{12, 0}
254}
255
256// A matcher that specifies categories of job states.
257type ListJobsRequest_JobStateMatcher int32
258
259const (
260	// Match all jobs, regardless of state.
261	ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0
262	// Only match jobs in non-terminal states: PENDING, RUNNING, or
263	// CANCEL_PENDING.
264	ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1
265	// Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
266	ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2
267)
268
269var ListJobsRequest_JobStateMatcher_name = map[int32]string{
270	0: "ALL",
271	1: "ACTIVE",
272	2: "NON_ACTIVE",
273}
274
275var ListJobsRequest_JobStateMatcher_value = map[string]int32{
276	"ALL":        0,
277	"ACTIVE":     1,
278	"NON_ACTIVE": 2,
279}
280
281func (x ListJobsRequest_JobStateMatcher) String() string {
282	return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x))
283}
284
285func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) {
286	return fileDescriptor_20fb118582e1d7de, []int{17, 0}
287}
288
289// The runtime logging config of the job.
290type LoggingConfig struct {
291	// The per-package log levels for the driver. This may include
292	// "root" package name to configure rootLogger.
293	// Examples:
294	//   'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
295	DriverLogLevels      map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1beta2.LoggingConfig_Level"`
296	XXX_NoUnkeyedLiteral struct{}                       `json:"-"`
297	XXX_unrecognized     []byte                         `json:"-"`
298	XXX_sizecache        int32                          `json:"-"`
299}
300
301func (m *LoggingConfig) Reset()         { *m = LoggingConfig{} }
302func (m *LoggingConfig) String() string { return proto.CompactTextString(m) }
303func (*LoggingConfig) ProtoMessage()    {}
304func (*LoggingConfig) Descriptor() ([]byte, []int) {
305	return fileDescriptor_20fb118582e1d7de, []int{0}
306}
307
308func (m *LoggingConfig) XXX_Unmarshal(b []byte) error {
309	return xxx_messageInfo_LoggingConfig.Unmarshal(m, b)
310}
311func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
312	return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic)
313}
314func (m *LoggingConfig) XXX_Merge(src proto.Message) {
315	xxx_messageInfo_LoggingConfig.Merge(m, src)
316}
317func (m *LoggingConfig) XXX_Size() int {
318	return xxx_messageInfo_LoggingConfig.Size(m)
319}
320func (m *LoggingConfig) XXX_DiscardUnknown() {
321	xxx_messageInfo_LoggingConfig.DiscardUnknown(m)
322}
323
324var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo
325
326func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level {
327	if m != nil {
328		return m.DriverLogLevels
329	}
330	return nil
331}
332
333// A Cloud Dataproc job for running
334// [Apache Hadoop
335// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
336// jobs on [Apache Hadoop
337// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
338type HadoopJob struct {
339	// Required. Indicates the location of the driver's main class. Specify
340	// either the jar file that contains the main class or the main class name.
341	// To specify both, add the jar file to `jar_file_uris`, and then specify
342	// the main class name in this property.
343	//
344	// Types that are valid to be assigned to Driver:
345	//	*HadoopJob_MainJarFileUri
346	//	*HadoopJob_MainClass
347	Driver isHadoopJob_Driver `protobuf_oneof:"driver"`
348	// Optional. The arguments to pass to the driver. Do not
349	// include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as
350	// job properties, since a collision may occur that causes an incorrect job
351	// submission.
352	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
353	// Optional. Jar file URIs to add to the CLASSPATHs of the
354	// Hadoop driver and tasks.
355	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
356	// Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied
357	// to the working directory of Hadoop drivers and distributed tasks. Useful
358	// for naively parallel tasks.
359	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
360	// Optional. HCFS URIs of archives to be extracted in the working directory of
361	// Hadoop drivers and tasks. Supported file types:
362	// .jar, .tar, .tar.gz, .tgz, or .zip.
363	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
364	// Optional. A mapping of property names to values, used to configure Hadoop.
365	// Properties that conflict with values set by the Cloud Dataproc API may be
366	// overwritten. Can include properties set in /etc/hadoop/conf/*-site and
367	// classes in user code.
368	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
369	// Optional. The runtime log config for job execution.
370	LoggingConfig        *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
371	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
372	XXX_unrecognized     []byte         `json:"-"`
373	XXX_sizecache        int32          `json:"-"`
374}
375
376func (m *HadoopJob) Reset()         { *m = HadoopJob{} }
377func (m *HadoopJob) String() string { return proto.CompactTextString(m) }
378func (*HadoopJob) ProtoMessage()    {}
379func (*HadoopJob) Descriptor() ([]byte, []int) {
380	return fileDescriptor_20fb118582e1d7de, []int{1}
381}
382
383func (m *HadoopJob) XXX_Unmarshal(b []byte) error {
384	return xxx_messageInfo_HadoopJob.Unmarshal(m, b)
385}
386func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
387	return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic)
388}
389func (m *HadoopJob) XXX_Merge(src proto.Message) {
390	xxx_messageInfo_HadoopJob.Merge(m, src)
391}
392func (m *HadoopJob) XXX_Size() int {
393	return xxx_messageInfo_HadoopJob.Size(m)
394}
395func (m *HadoopJob) XXX_DiscardUnknown() {
396	xxx_messageInfo_HadoopJob.DiscardUnknown(m)
397}
398
399var xxx_messageInfo_HadoopJob proto.InternalMessageInfo
400
401type isHadoopJob_Driver interface {
402	isHadoopJob_Driver()
403}
404
405type HadoopJob_MainJarFileUri struct {
406	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
407}
408
409type HadoopJob_MainClass struct {
410	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
411}
412
413func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {}
414
415func (*HadoopJob_MainClass) isHadoopJob_Driver() {}
416
417func (m *HadoopJob) GetDriver() isHadoopJob_Driver {
418	if m != nil {
419		return m.Driver
420	}
421	return nil
422}
423
424func (m *HadoopJob) GetMainJarFileUri() string {
425	if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok {
426		return x.MainJarFileUri
427	}
428	return ""
429}
430
431func (m *HadoopJob) GetMainClass() string {
432	if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok {
433		return x.MainClass
434	}
435	return ""
436}
437
438func (m *HadoopJob) GetArgs() []string {
439	if m != nil {
440		return m.Args
441	}
442	return nil
443}
444
445func (m *HadoopJob) GetJarFileUris() []string {
446	if m != nil {
447		return m.JarFileUris
448	}
449	return nil
450}
451
452func (m *HadoopJob) GetFileUris() []string {
453	if m != nil {
454		return m.FileUris
455	}
456	return nil
457}
458
459func (m *HadoopJob) GetArchiveUris() []string {
460	if m != nil {
461		return m.ArchiveUris
462	}
463	return nil
464}
465
466func (m *HadoopJob) GetProperties() map[string]string {
467	if m != nil {
468		return m.Properties
469	}
470	return nil
471}
472
473func (m *HadoopJob) GetLoggingConfig() *LoggingConfig {
474	if m != nil {
475		return m.LoggingConfig
476	}
477	return nil
478}
479
480// XXX_OneofWrappers is for the internal use of the proto package.
481func (*HadoopJob) XXX_OneofWrappers() []interface{} {
482	return []interface{}{
483		(*HadoopJob_MainJarFileUri)(nil),
484		(*HadoopJob_MainClass)(nil),
485	}
486}
487
488// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/)
489// applications on YARN.
490type SparkJob struct {
491	// Required. The specification of the main method to call to drive the job.
492	// Specify either the jar file that contains the main class or the main class
493	// name. To pass both a main jar and a main class in that jar, add the jar to
494	// `CommonJob.jar_file_uris`, and then specify the main class name in
495	// `main_class`.
496	//
497	// Types that are valid to be assigned to Driver:
498	//	*SparkJob_MainJarFileUri
499	//	*SparkJob_MainClass
500	Driver isSparkJob_Driver `protobuf_oneof:"driver"`
501	// Optional. The arguments to pass to the driver. Do not include arguments,
502	// such as `--conf`, that can be set as job properties, since a collision may
503	// occur that causes an incorrect job submission.
504	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
505	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
506	// Spark driver and tasks.
507	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
508	// Optional. HCFS URIs of files to be copied to the working directory of
509	// Spark drivers and distributed tasks. Useful for naively parallel tasks.
510	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
511	// Optional. HCFS URIs of archives to be extracted in the working directory
512	// of Spark drivers and tasks. Supported file types:
513	// .jar, .tar, .tar.gz, .tgz, and .zip.
514	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
515	// Optional. A mapping of property names to values, used to configure Spark.
516	// Properties that conflict with values set by the Cloud Dataproc API may be
517	// overwritten. Can include properties set in
518	// /etc/spark/conf/spark-defaults.conf and classes in user code.
519	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
520	// Optional. The runtime log config for job execution.
521	LoggingConfig        *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
522	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
523	XXX_unrecognized     []byte         `json:"-"`
524	XXX_sizecache        int32          `json:"-"`
525}
526
527func (m *SparkJob) Reset()         { *m = SparkJob{} }
528func (m *SparkJob) String() string { return proto.CompactTextString(m) }
529func (*SparkJob) ProtoMessage()    {}
530func (*SparkJob) Descriptor() ([]byte, []int) {
531	return fileDescriptor_20fb118582e1d7de, []int{2}
532}
533
534func (m *SparkJob) XXX_Unmarshal(b []byte) error {
535	return xxx_messageInfo_SparkJob.Unmarshal(m, b)
536}
537func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
538	return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic)
539}
540func (m *SparkJob) XXX_Merge(src proto.Message) {
541	xxx_messageInfo_SparkJob.Merge(m, src)
542}
543func (m *SparkJob) XXX_Size() int {
544	return xxx_messageInfo_SparkJob.Size(m)
545}
546func (m *SparkJob) XXX_DiscardUnknown() {
547	xxx_messageInfo_SparkJob.DiscardUnknown(m)
548}
549
550var xxx_messageInfo_SparkJob proto.InternalMessageInfo
551
552type isSparkJob_Driver interface {
553	isSparkJob_Driver()
554}
555
556type SparkJob_MainJarFileUri struct {
557	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
558}
559
560type SparkJob_MainClass struct {
561	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
562}
563
564func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {}
565
566func (*SparkJob_MainClass) isSparkJob_Driver() {}
567
568func (m *SparkJob) GetDriver() isSparkJob_Driver {
569	if m != nil {
570		return m.Driver
571	}
572	return nil
573}
574
575func (m *SparkJob) GetMainJarFileUri() string {
576	if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok {
577		return x.MainJarFileUri
578	}
579	return ""
580}
581
582func (m *SparkJob) GetMainClass() string {
583	if x, ok := m.GetDriver().(*SparkJob_MainClass); ok {
584		return x.MainClass
585	}
586	return ""
587}
588
589func (m *SparkJob) GetArgs() []string {
590	if m != nil {
591		return m.Args
592	}
593	return nil
594}
595
596func (m *SparkJob) GetJarFileUris() []string {
597	if m != nil {
598		return m.JarFileUris
599	}
600	return nil
601}
602
603func (m *SparkJob) GetFileUris() []string {
604	if m != nil {
605		return m.FileUris
606	}
607	return nil
608}
609
610func (m *SparkJob) GetArchiveUris() []string {
611	if m != nil {
612		return m.ArchiveUris
613	}
614	return nil
615}
616
617func (m *SparkJob) GetProperties() map[string]string {
618	if m != nil {
619		return m.Properties
620	}
621	return nil
622}
623
624func (m *SparkJob) GetLoggingConfig() *LoggingConfig {
625	if m != nil {
626		return m.LoggingConfig
627	}
628	return nil
629}
630
631// XXX_OneofWrappers is for the internal use of the proto package.
632func (*SparkJob) XXX_OneofWrappers() []interface{} {
633	return []interface{}{
634		(*SparkJob_MainJarFileUri)(nil),
635		(*SparkJob_MainClass)(nil),
636	}
637}
638
639// A Cloud Dataproc job for running
640// [Apache
641// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
642// applications on YARN.
643type PySparkJob struct {
644	// Required. The HCFS URI of the main Python file to use as the driver. Must
645	// be a .py file.
646	MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"`
647	// Optional. The arguments to pass to the driver.  Do not include arguments,
648	// such as `--conf`, that can be set as job properties, since a collision may
649	// occur that causes an incorrect job submission.
650	Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
651	// Optional. HCFS file URIs of Python files to pass to the PySpark
652	// framework. Supported file types: .py, .egg, and .zip.
653	PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"`
654	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
655	// Python driver and tasks.
656	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
657	// Optional. HCFS URIs of files to be copied to the working directory of
658	// Python drivers and distributed tasks. Useful for naively parallel tasks.
659	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
660	// Optional. HCFS URIs of archives to be extracted in the working directory of
661	// .jar, .tar, .tar.gz, .tgz, and .zip.
662	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
663	// Optional. A mapping of property names to values, used to configure PySpark.
664	// Properties that conflict with values set by the Cloud Dataproc API may be
665	// overwritten. Can include properties set in
666	// /etc/spark/conf/spark-defaults.conf and classes in user code.
667	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
668	// Optional. The runtime log config for job execution.
669	LoggingConfig        *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
670	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
671	XXX_unrecognized     []byte         `json:"-"`
672	XXX_sizecache        int32          `json:"-"`
673}
674
675func (m *PySparkJob) Reset()         { *m = PySparkJob{} }
676func (m *PySparkJob) String() string { return proto.CompactTextString(m) }
677func (*PySparkJob) ProtoMessage()    {}
678func (*PySparkJob) Descriptor() ([]byte, []int) {
679	return fileDescriptor_20fb118582e1d7de, []int{3}
680}
681
682func (m *PySparkJob) XXX_Unmarshal(b []byte) error {
683	return xxx_messageInfo_PySparkJob.Unmarshal(m, b)
684}
685func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
686	return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic)
687}
688func (m *PySparkJob) XXX_Merge(src proto.Message) {
689	xxx_messageInfo_PySparkJob.Merge(m, src)
690}
691func (m *PySparkJob) XXX_Size() int {
692	return xxx_messageInfo_PySparkJob.Size(m)
693}
694func (m *PySparkJob) XXX_DiscardUnknown() {
695	xxx_messageInfo_PySparkJob.DiscardUnknown(m)
696}
697
698var xxx_messageInfo_PySparkJob proto.InternalMessageInfo
699
700func (m *PySparkJob) GetMainPythonFileUri() string {
701	if m != nil {
702		return m.MainPythonFileUri
703	}
704	return ""
705}
706
707func (m *PySparkJob) GetArgs() []string {
708	if m != nil {
709		return m.Args
710	}
711	return nil
712}
713
714func (m *PySparkJob) GetPythonFileUris() []string {
715	if m != nil {
716		return m.PythonFileUris
717	}
718	return nil
719}
720
721func (m *PySparkJob) GetJarFileUris() []string {
722	if m != nil {
723		return m.JarFileUris
724	}
725	return nil
726}
727
728func (m *PySparkJob) GetFileUris() []string {
729	if m != nil {
730		return m.FileUris
731	}
732	return nil
733}
734
735func (m *PySparkJob) GetArchiveUris() []string {
736	if m != nil {
737		return m.ArchiveUris
738	}
739	return nil
740}
741
742func (m *PySparkJob) GetProperties() map[string]string {
743	if m != nil {
744		return m.Properties
745	}
746	return nil
747}
748
749func (m *PySparkJob) GetLoggingConfig() *LoggingConfig {
750	if m != nil {
751		return m.LoggingConfig
752	}
753	return nil
754}
755
756// A list of queries to run on a cluster.
757type QueryList struct {
758	// Required. The queries to execute. You do not need to terminate a query
759	// with a semicolon. Multiple queries can be specified in one string
760	// by separating each with a semicolon. Here is an example of an Cloud
761	// Dataproc API snippet that uses a QueryList to specify a HiveJob:
762	//
763	//     "hiveJob": {
764	//       "queryList": {
765	//         "queries": [
766	//           "query1",
767	//           "query2",
768	//           "query3;query4",
769	//         ]
770	//       }
771	//     }
772	Queries              []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"`
773	XXX_NoUnkeyedLiteral struct{} `json:"-"`
774	XXX_unrecognized     []byte   `json:"-"`
775	XXX_sizecache        int32    `json:"-"`
776}
777
778func (m *QueryList) Reset()         { *m = QueryList{} }
779func (m *QueryList) String() string { return proto.CompactTextString(m) }
780func (*QueryList) ProtoMessage()    {}
781func (*QueryList) Descriptor() ([]byte, []int) {
782	return fileDescriptor_20fb118582e1d7de, []int{4}
783}
784
785func (m *QueryList) XXX_Unmarshal(b []byte) error {
786	return xxx_messageInfo_QueryList.Unmarshal(m, b)
787}
788func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
789	return xxx_messageInfo_QueryList.Marshal(b, m, deterministic)
790}
791func (m *QueryList) XXX_Merge(src proto.Message) {
792	xxx_messageInfo_QueryList.Merge(m, src)
793}
794func (m *QueryList) XXX_Size() int {
795	return xxx_messageInfo_QueryList.Size(m)
796}
797func (m *QueryList) XXX_DiscardUnknown() {
798	xxx_messageInfo_QueryList.DiscardUnknown(m)
799}
800
801var xxx_messageInfo_QueryList proto.InternalMessageInfo
802
803func (m *QueryList) GetQueries() []string {
804	if m != nil {
805		return m.Queries
806	}
807	return nil
808}
809
810// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/)
811// queries on YARN.
812type HiveJob struct {
813	// Required. The sequence of Hive queries to execute, specified as either
814	// an HCFS file URI or a list of queries.
815	//
816	// Types that are valid to be assigned to Queries:
817	//	*HiveJob_QueryFileUri
818	//	*HiveJob_QueryList
819	Queries isHiveJob_Queries `protobuf_oneof:"queries"`
820	// Optional. Whether to continue executing queries if a query fails.
821	// The default value is `false`. Setting to `true` can be useful when
822	// executing independent parallel queries.
823	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
824	// Optional. Mapping of query variable names to values (equivalent to the
825	// Hive command: `SET name="value";`).
826	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
827	// Optional. A mapping of property names and values, used to configure Hive.
828	// Properties that conflict with values set by the Cloud Dataproc API may be
829	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
830	// /etc/hive/conf/hive-site.xml, and classes in user code.
831	Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
832	// Optional. HCFS URIs of jar files to add to the CLASSPATH of the
833	// Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
834	// and UDFs.
835	JarFileUris          []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
836	XXX_NoUnkeyedLiteral struct{} `json:"-"`
837	XXX_unrecognized     []byte   `json:"-"`
838	XXX_sizecache        int32    `json:"-"`
839}
840
841func (m *HiveJob) Reset()         { *m = HiveJob{} }
842func (m *HiveJob) String() string { return proto.CompactTextString(m) }
843func (*HiveJob) ProtoMessage()    {}
844func (*HiveJob) Descriptor() ([]byte, []int) {
845	return fileDescriptor_20fb118582e1d7de, []int{5}
846}
847
848func (m *HiveJob) XXX_Unmarshal(b []byte) error {
849	return xxx_messageInfo_HiveJob.Unmarshal(m, b)
850}
851func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
852	return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic)
853}
854func (m *HiveJob) XXX_Merge(src proto.Message) {
855	xxx_messageInfo_HiveJob.Merge(m, src)
856}
857func (m *HiveJob) XXX_Size() int {
858	return xxx_messageInfo_HiveJob.Size(m)
859}
860func (m *HiveJob) XXX_DiscardUnknown() {
861	xxx_messageInfo_HiveJob.DiscardUnknown(m)
862}
863
864var xxx_messageInfo_HiveJob proto.InternalMessageInfo
865
866type isHiveJob_Queries interface {
867	isHiveJob_Queries()
868}
869
870type HiveJob_QueryFileUri struct {
871	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
872}
873
874type HiveJob_QueryList struct {
875	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
876}
877
878func (*HiveJob_QueryFileUri) isHiveJob_Queries() {}
879
880func (*HiveJob_QueryList) isHiveJob_Queries() {}
881
882func (m *HiveJob) GetQueries() isHiveJob_Queries {
883	if m != nil {
884		return m.Queries
885	}
886	return nil
887}
888
889func (m *HiveJob) GetQueryFileUri() string {
890	if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok {
891		return x.QueryFileUri
892	}
893	return ""
894}
895
896func (m *HiveJob) GetQueryList() *QueryList {
897	if x, ok := m.GetQueries().(*HiveJob_QueryList); ok {
898		return x.QueryList
899	}
900	return nil
901}
902
903func (m *HiveJob) GetContinueOnFailure() bool {
904	if m != nil {
905		return m.ContinueOnFailure
906	}
907	return false
908}
909
910func (m *HiveJob) GetScriptVariables() map[string]string {
911	if m != nil {
912		return m.ScriptVariables
913	}
914	return nil
915}
916
917func (m *HiveJob) GetProperties() map[string]string {
918	if m != nil {
919		return m.Properties
920	}
921	return nil
922}
923
924func (m *HiveJob) GetJarFileUris() []string {
925	if m != nil {
926		return m.JarFileUris
927	}
928	return nil
929}
930
931// XXX_OneofWrappers is for the internal use of the proto package.
932func (*HiveJob) XXX_OneofWrappers() []interface{} {
933	return []interface{}{
934		(*HiveJob_QueryFileUri)(nil),
935		(*HiveJob_QueryList)(nil),
936	}
937}
938
939// A Cloud Dataproc job for running [Apache Spark
940// SQL](http://spark.apache.org/sql/) queries.
941type SparkSqlJob struct {
942	// Required. The sequence of Spark SQL queries to execute, specified as
943	// either an HCFS file URI or as a list of queries.
944	//
945	// Types that are valid to be assigned to Queries:
946	//	*SparkSqlJob_QueryFileUri
947	//	*SparkSqlJob_QueryList
948	Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"`
949	// Optional. Mapping of query variable names to values (equivalent to the
950	// Spark SQL command: SET `name="value";`).
951	ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
952	// Optional. A mapping of property names to values, used to configure
953	// Spark SQL's SparkConf. Properties that conflict with values set by the
954	// Cloud Dataproc API may be overwritten.
955	Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
956	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
957	JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
958	// Optional. The runtime log config for job execution.
959	LoggingConfig        *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
960	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
961	XXX_unrecognized     []byte         `json:"-"`
962	XXX_sizecache        int32          `json:"-"`
963}
964
965func (m *SparkSqlJob) Reset()         { *m = SparkSqlJob{} }
966func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) }
967func (*SparkSqlJob) ProtoMessage()    {}
968func (*SparkSqlJob) Descriptor() ([]byte, []int) {
969	return fileDescriptor_20fb118582e1d7de, []int{6}
970}
971
972func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error {
973	return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b)
974}
975func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
976	return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic)
977}
978func (m *SparkSqlJob) XXX_Merge(src proto.Message) {
979	xxx_messageInfo_SparkSqlJob.Merge(m, src)
980}
981func (m *SparkSqlJob) XXX_Size() int {
982	return xxx_messageInfo_SparkSqlJob.Size(m)
983}
984func (m *SparkSqlJob) XXX_DiscardUnknown() {
985	xxx_messageInfo_SparkSqlJob.DiscardUnknown(m)
986}
987
988var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo
989
990type isSparkSqlJob_Queries interface {
991	isSparkSqlJob_Queries()
992}
993
994type SparkSqlJob_QueryFileUri struct {
995	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
996}
997
998type SparkSqlJob_QueryList struct {
999	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
1000}
1001
1002func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {}
1003
1004func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries() {}
1005
1006func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries {
1007	if m != nil {
1008		return m.Queries
1009	}
1010	return nil
1011}
1012
1013func (m *SparkSqlJob) GetQueryFileUri() string {
1014	if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok {
1015		return x.QueryFileUri
1016	}
1017	return ""
1018}
1019
1020func (m *SparkSqlJob) GetQueryList() *QueryList {
1021	if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok {
1022		return x.QueryList
1023	}
1024	return nil
1025}
1026
1027func (m *SparkSqlJob) GetScriptVariables() map[string]string {
1028	if m != nil {
1029		return m.ScriptVariables
1030	}
1031	return nil
1032}
1033
1034func (m *SparkSqlJob) GetProperties() map[string]string {
1035	if m != nil {
1036		return m.Properties
1037	}
1038	return nil
1039}
1040
1041func (m *SparkSqlJob) GetJarFileUris() []string {
1042	if m != nil {
1043		return m.JarFileUris
1044	}
1045	return nil
1046}
1047
1048func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig {
1049	if m != nil {
1050		return m.LoggingConfig
1051	}
1052	return nil
1053}
1054
1055// XXX_OneofWrappers is for the internal use of the proto package.
1056func (*SparkSqlJob) XXX_OneofWrappers() []interface{} {
1057	return []interface{}{
1058		(*SparkSqlJob_QueryFileUri)(nil),
1059		(*SparkSqlJob_QueryList)(nil),
1060	}
1061}
1062
1063// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
1064// queries on YARN.
1065type PigJob struct {
1066	// Required. The sequence of Pig queries to execute, specified as an HCFS
1067	// file URI or a list of queries.
1068	//
1069	// Types that are valid to be assigned to Queries:
1070	//	*PigJob_QueryFileUri
1071	//	*PigJob_QueryList
1072	Queries isPigJob_Queries `protobuf_oneof:"queries"`
1073	// Optional. Whether to continue executing queries if a query fails.
1074	// The default value is `false`. Setting to `true` can be useful when
1075	// executing independent parallel queries.
1076	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
1077	// Optional. Mapping of query variable names to values (equivalent to the Pig
1078	// command: `name=[value]`).
1079	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
1080	// Optional. A mapping of property names to values, used to configure Pig.
1081	// Properties that conflict with values set by the Cloud Dataproc API may be
1082	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
1083	// /etc/pig/conf/pig.properties, and classes in user code.
1084	Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
1085	// Optional. HCFS URIs of jar files to add to the CLASSPATH of
1086	// the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
1087	JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
1088	// Optional. The runtime log config for job execution.
1089	LoggingConfig        *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
1090	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
1091	XXX_unrecognized     []byte         `json:"-"`
1092	XXX_sizecache        int32          `json:"-"`
1093}
1094
1095func (m *PigJob) Reset()         { *m = PigJob{} }
1096func (m *PigJob) String() string { return proto.CompactTextString(m) }
1097func (*PigJob) ProtoMessage()    {}
1098func (*PigJob) Descriptor() ([]byte, []int) {
1099	return fileDescriptor_20fb118582e1d7de, []int{7}
1100}
1101
1102func (m *PigJob) XXX_Unmarshal(b []byte) error {
1103	return xxx_messageInfo_PigJob.Unmarshal(m, b)
1104}
1105func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1106	return xxx_messageInfo_PigJob.Marshal(b, m, deterministic)
1107}
1108func (m *PigJob) XXX_Merge(src proto.Message) {
1109	xxx_messageInfo_PigJob.Merge(m, src)
1110}
1111func (m *PigJob) XXX_Size() int {
1112	return xxx_messageInfo_PigJob.Size(m)
1113}
1114func (m *PigJob) XXX_DiscardUnknown() {
1115	xxx_messageInfo_PigJob.DiscardUnknown(m)
1116}
1117
1118var xxx_messageInfo_PigJob proto.InternalMessageInfo
1119
1120type isPigJob_Queries interface {
1121	isPigJob_Queries()
1122}
1123
1124type PigJob_QueryFileUri struct {
1125	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
1126}
1127
1128type PigJob_QueryList struct {
1129	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
1130}
1131
1132func (*PigJob_QueryFileUri) isPigJob_Queries() {}
1133
1134func (*PigJob_QueryList) isPigJob_Queries() {}
1135
1136func (m *PigJob) GetQueries() isPigJob_Queries {
1137	if m != nil {
1138		return m.Queries
1139	}
1140	return nil
1141}
1142
1143func (m *PigJob) GetQueryFileUri() string {
1144	if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok {
1145		return x.QueryFileUri
1146	}
1147	return ""
1148}
1149
1150func (m *PigJob) GetQueryList() *QueryList {
1151	if x, ok := m.GetQueries().(*PigJob_QueryList); ok {
1152		return x.QueryList
1153	}
1154	return nil
1155}
1156
1157func (m *PigJob) GetContinueOnFailure() bool {
1158	if m != nil {
1159		return m.ContinueOnFailure
1160	}
1161	return false
1162}
1163
1164func (m *PigJob) GetScriptVariables() map[string]string {
1165	if m != nil {
1166		return m.ScriptVariables
1167	}
1168	return nil
1169}
1170
1171func (m *PigJob) GetProperties() map[string]string {
1172	if m != nil {
1173		return m.Properties
1174	}
1175	return nil
1176}
1177
1178func (m *PigJob) GetJarFileUris() []string {
1179	if m != nil {
1180		return m.JarFileUris
1181	}
1182	return nil
1183}
1184
1185func (m *PigJob) GetLoggingConfig() *LoggingConfig {
1186	if m != nil {
1187		return m.LoggingConfig
1188	}
1189	return nil
1190}
1191
1192// XXX_OneofWrappers is for the internal use of the proto package.
1193func (*PigJob) XXX_OneofWrappers() []interface{} {
1194	return []interface{}{
1195		(*PigJob_QueryFileUri)(nil),
1196		(*PigJob_QueryList)(nil),
1197	}
1198}
1199
1200// A Cloud Dataproc job for running
1201// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
1202// applications on YARN.
1203type SparkRJob struct {
1204	// Required. The HCFS URI of the main R file to use as the driver.
1205	// Must be a .R file.
1206	MainRFileUri string `protobuf:"bytes,1,opt,name=main_r_file_uri,json=mainRFileUri,proto3" json:"main_r_file_uri,omitempty"`
1207	// Optional. The arguments to pass to the driver.  Do not include arguments,
1208	// such as `--conf`, that can be set as job properties, since a collision may
1209	// occur that causes an incorrect job submission.
1210	Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
1211	// Optional. HCFS URIs of files to be copied to the working directory of
1212	// R drivers and distributed tasks. Useful for naively parallel tasks.
1213	FileUris []string `protobuf:"bytes,3,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
1214	// Optional. HCFS URIs of archives to be extracted in the working directory of
1215	// Spark drivers and tasks. Supported file types:
1216	// .jar, .tar, .tar.gz, .tgz, and .zip.
1217	ArchiveUris []string `protobuf:"bytes,4,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
1218	// Optional. A mapping of property names to values, used to configure SparkR.
1219	// Properties that conflict with values set by the Cloud Dataproc API may be
1220	// overwritten. Can include properties set in
1221	// /etc/spark/conf/spark-defaults.conf and classes in user code.
1222	Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
1223	// Optional. The runtime log config for job execution.
1224	LoggingConfig        *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
1225	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
1226	XXX_unrecognized     []byte         `json:"-"`
1227	XXX_sizecache        int32          `json:"-"`
1228}
1229
1230func (m *SparkRJob) Reset()         { *m = SparkRJob{} }
1231func (m *SparkRJob) String() string { return proto.CompactTextString(m) }
1232func (*SparkRJob) ProtoMessage()    {}
1233func (*SparkRJob) Descriptor() ([]byte, []int) {
1234	return fileDescriptor_20fb118582e1d7de, []int{8}
1235}
1236
1237func (m *SparkRJob) XXX_Unmarshal(b []byte) error {
1238	return xxx_messageInfo_SparkRJob.Unmarshal(m, b)
1239}
1240func (m *SparkRJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1241	return xxx_messageInfo_SparkRJob.Marshal(b, m, deterministic)
1242}
1243func (m *SparkRJob) XXX_Merge(src proto.Message) {
1244	xxx_messageInfo_SparkRJob.Merge(m, src)
1245}
1246func (m *SparkRJob) XXX_Size() int {
1247	return xxx_messageInfo_SparkRJob.Size(m)
1248}
1249func (m *SparkRJob) XXX_DiscardUnknown() {
1250	xxx_messageInfo_SparkRJob.DiscardUnknown(m)
1251}
1252
1253var xxx_messageInfo_SparkRJob proto.InternalMessageInfo
1254
1255func (m *SparkRJob) GetMainRFileUri() string {
1256	if m != nil {
1257		return m.MainRFileUri
1258	}
1259	return ""
1260}
1261
1262func (m *SparkRJob) GetArgs() []string {
1263	if m != nil {
1264		return m.Args
1265	}
1266	return nil
1267}
1268
1269func (m *SparkRJob) GetFileUris() []string {
1270	if m != nil {
1271		return m.FileUris
1272	}
1273	return nil
1274}
1275
1276func (m *SparkRJob) GetArchiveUris() []string {
1277	if m != nil {
1278		return m.ArchiveUris
1279	}
1280	return nil
1281}
1282
1283func (m *SparkRJob) GetProperties() map[string]string {
1284	if m != nil {
1285		return m.Properties
1286	}
1287	return nil
1288}
1289
1290func (m *SparkRJob) GetLoggingConfig() *LoggingConfig {
1291	if m != nil {
1292		return m.LoggingConfig
1293	}
1294	return nil
1295}
1296
1297// Cloud Dataproc job config.
1298type JobPlacement struct {
1299	// Required. The name of the cluster where the job will be submitted.
1300	ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
1301	// Output only. A cluster UUID generated by the Cloud Dataproc service when
1302	// the job is submitted.
1303	ClusterUuid          string   `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"`
1304	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1305	XXX_unrecognized     []byte   `json:"-"`
1306	XXX_sizecache        int32    `json:"-"`
1307}
1308
1309func (m *JobPlacement) Reset()         { *m = JobPlacement{} }
1310func (m *JobPlacement) String() string { return proto.CompactTextString(m) }
1311func (*JobPlacement) ProtoMessage()    {}
1312func (*JobPlacement) Descriptor() ([]byte, []int) {
1313	return fileDescriptor_20fb118582e1d7de, []int{9}
1314}
1315
1316func (m *JobPlacement) XXX_Unmarshal(b []byte) error {
1317	return xxx_messageInfo_JobPlacement.Unmarshal(m, b)
1318}
1319func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1320	return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic)
1321}
1322func (m *JobPlacement) XXX_Merge(src proto.Message) {
1323	xxx_messageInfo_JobPlacement.Merge(m, src)
1324}
1325func (m *JobPlacement) XXX_Size() int {
1326	return xxx_messageInfo_JobPlacement.Size(m)
1327}
1328func (m *JobPlacement) XXX_DiscardUnknown() {
1329	xxx_messageInfo_JobPlacement.DiscardUnknown(m)
1330}
1331
1332var xxx_messageInfo_JobPlacement proto.InternalMessageInfo
1333
1334func (m *JobPlacement) GetClusterName() string {
1335	if m != nil {
1336		return m.ClusterName
1337	}
1338	return ""
1339}
1340
1341func (m *JobPlacement) GetClusterUuid() string {
1342	if m != nil {
1343		return m.ClusterUuid
1344	}
1345	return ""
1346}
1347
1348// Cloud Dataproc job status.
1349type JobStatus struct {
1350	// Output only. A state message specifying the overall job state.
1351	State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.JobStatus_State" json:"state,omitempty"`
1352	// Output only. Optional job state details, such as an error
1353	// description if the state is <code>ERROR</code>.
1354	Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"`
1355	// Output only. The time when this state was entered.
1356	StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"`
1357	// Output only. Additional state information, which includes
1358	// status reported by the agent.
1359	Substate             JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1beta2.JobStatus_Substate" json:"substate,omitempty"`
1360	XXX_NoUnkeyedLiteral struct{}           `json:"-"`
1361	XXX_unrecognized     []byte             `json:"-"`
1362	XXX_sizecache        int32              `json:"-"`
1363}
1364
1365func (m *JobStatus) Reset()         { *m = JobStatus{} }
1366func (m *JobStatus) String() string { return proto.CompactTextString(m) }
1367func (*JobStatus) ProtoMessage()    {}
1368func (*JobStatus) Descriptor() ([]byte, []int) {
1369	return fileDescriptor_20fb118582e1d7de, []int{10}
1370}
1371
1372func (m *JobStatus) XXX_Unmarshal(b []byte) error {
1373	return xxx_messageInfo_JobStatus.Unmarshal(m, b)
1374}
1375func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1376	return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic)
1377}
1378func (m *JobStatus) XXX_Merge(src proto.Message) {
1379	xxx_messageInfo_JobStatus.Merge(m, src)
1380}
1381func (m *JobStatus) XXX_Size() int {
1382	return xxx_messageInfo_JobStatus.Size(m)
1383}
1384func (m *JobStatus) XXX_DiscardUnknown() {
1385	xxx_messageInfo_JobStatus.DiscardUnknown(m)
1386}
1387
1388var xxx_messageInfo_JobStatus proto.InternalMessageInfo
1389
1390func (m *JobStatus) GetState() JobStatus_State {
1391	if m != nil {
1392		return m.State
1393	}
1394	return JobStatus_STATE_UNSPECIFIED
1395}
1396
1397func (m *JobStatus) GetDetails() string {
1398	if m != nil {
1399		return m.Details
1400	}
1401	return ""
1402}
1403
1404func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp {
1405	if m != nil {
1406		return m.StateStartTime
1407	}
1408	return nil
1409}
1410
1411func (m *JobStatus) GetSubstate() JobStatus_Substate {
1412	if m != nil {
1413		return m.Substate
1414	}
1415	return JobStatus_UNSPECIFIED
1416}
1417
1418// Encapsulates the full scoping used to reference a job.
1419type JobReference struct {
1420	// Required. The ID of the Google Cloud Platform project that the job
1421	// belongs to.
1422	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
1423	// Optional. The job ID, which must be unique within the project.
1424	//
1425	// The ID must contain only letters (a-z, A-Z), numbers (0-9),
1426	// underscores (_), or hyphens (-). The maximum length is 100 characters.
1427	//
1428	// If not specified by the caller, the job ID will be provided by the server.
1429	JobId                string   `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
1430	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1431	XXX_unrecognized     []byte   `json:"-"`
1432	XXX_sizecache        int32    `json:"-"`
1433}
1434
1435func (m *JobReference) Reset()         { *m = JobReference{} }
1436func (m *JobReference) String() string { return proto.CompactTextString(m) }
1437func (*JobReference) ProtoMessage()    {}
1438func (*JobReference) Descriptor() ([]byte, []int) {
1439	return fileDescriptor_20fb118582e1d7de, []int{11}
1440}
1441
1442func (m *JobReference) XXX_Unmarshal(b []byte) error {
1443	return xxx_messageInfo_JobReference.Unmarshal(m, b)
1444}
1445func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1446	return xxx_messageInfo_JobReference.Marshal(b, m, deterministic)
1447}
1448func (m *JobReference) XXX_Merge(src proto.Message) {
1449	xxx_messageInfo_JobReference.Merge(m, src)
1450}
1451func (m *JobReference) XXX_Size() int {
1452	return xxx_messageInfo_JobReference.Size(m)
1453}
1454func (m *JobReference) XXX_DiscardUnknown() {
1455	xxx_messageInfo_JobReference.DiscardUnknown(m)
1456}
1457
1458var xxx_messageInfo_JobReference proto.InternalMessageInfo
1459
1460func (m *JobReference) GetProjectId() string {
1461	if m != nil {
1462		return m.ProjectId
1463	}
1464	return ""
1465}
1466
1467func (m *JobReference) GetJobId() string {
1468	if m != nil {
1469		return m.JobId
1470	}
1471	return ""
1472}
1473
1474// A YARN application created by a job. Application information is a subset of
1475// <code>org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto</code>.
1476//
1477// **Beta Feature**: This report is available for testing purposes only. It may
1478// be changed before final release.
1479type YarnApplication struct {
1480	// Required. The application name.
1481	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
1482	// Required. The application state.
1483	State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.YarnApplication_State" json:"state,omitempty"`
1484	// Required. The numerical progress of the application, from 1 to 100.
1485	Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"`
1486	// Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or
1487	// TimelineServer that provides application-specific information. The URL uses
1488	// the internal hostname, and requires a proxy server for resolution and,
1489	// possibly, access.
1490	TrackingUrl          string   `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"`
1491	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1492	XXX_unrecognized     []byte   `json:"-"`
1493	XXX_sizecache        int32    `json:"-"`
1494}
1495
1496func (m *YarnApplication) Reset()         { *m = YarnApplication{} }
1497func (m *YarnApplication) String() string { return proto.CompactTextString(m) }
1498func (*YarnApplication) ProtoMessage()    {}
1499func (*YarnApplication) Descriptor() ([]byte, []int) {
1500	return fileDescriptor_20fb118582e1d7de, []int{12}
1501}
1502
1503func (m *YarnApplication) XXX_Unmarshal(b []byte) error {
1504	return xxx_messageInfo_YarnApplication.Unmarshal(m, b)
1505}
1506func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1507	return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic)
1508}
1509func (m *YarnApplication) XXX_Merge(src proto.Message) {
1510	xxx_messageInfo_YarnApplication.Merge(m, src)
1511}
1512func (m *YarnApplication) XXX_Size() int {
1513	return xxx_messageInfo_YarnApplication.Size(m)
1514}
1515func (m *YarnApplication) XXX_DiscardUnknown() {
1516	xxx_messageInfo_YarnApplication.DiscardUnknown(m)
1517}
1518
1519var xxx_messageInfo_YarnApplication proto.InternalMessageInfo
1520
1521func (m *YarnApplication) GetName() string {
1522	if m != nil {
1523		return m.Name
1524	}
1525	return ""
1526}
1527
1528func (m *YarnApplication) GetState() YarnApplication_State {
1529	if m != nil {
1530		return m.State
1531	}
1532	return YarnApplication_STATE_UNSPECIFIED
1533}
1534
1535func (m *YarnApplication) GetProgress() float32 {
1536	if m != nil {
1537		return m.Progress
1538	}
1539	return 0
1540}
1541
1542func (m *YarnApplication) GetTrackingUrl() string {
1543	if m != nil {
1544		return m.TrackingUrl
1545	}
1546	return ""
1547}
1548
1549// A Cloud Dataproc job resource.
1550type Job struct {
1551	// Optional. The fully qualified reference to the job, which can be used to
1552	// obtain the equivalent REST path of the job resource. If this property
1553	// is not specified when a job is created, the server generates a
1554	// <code>job_id</code>.
1555	Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"`
1556	// Required. Job information, including how, when, and where to
1557	// run the job.
1558	Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"`
1559	// Required. The application/framework-specific portion of the job.
1560	//
1561	// Types that are valid to be assigned to TypeJob:
1562	//	*Job_HadoopJob
1563	//	*Job_SparkJob
1564	//	*Job_PysparkJob
1565	//	*Job_HiveJob
1566	//	*Job_PigJob
1567	//	*Job_SparkRJob
1568	//	*Job_SparkSqlJob
1569	TypeJob isJob_TypeJob `protobuf_oneof:"type_job"`
1570	// Output only. The job status. Additional application-specific
1571	// status information may be contained in the <code>type_job</code>
1572	// and <code>yarn_applications</code> fields.
1573	Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"`
1574	// Output only. The previous job status.
1575	StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"`
1576	// Output only. The collection of YARN applications spun up by this job.
1577	//
1578	// **Beta** Feature: This report is available for testing purposes only. It
1579	// may be changed before final release.
1580	YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"`
1581	// Output only. The email address of the user submitting the job. For jobs
1582	// submitted on the cluster, the address is <code>username@hostname</code>.
1583	SubmittedBy string `protobuf:"bytes,10,opt,name=submitted_by,json=submittedBy,proto3" json:"submitted_by,omitempty"`
1584	// Output only. A URI pointing to the location of the stdout of the job's
1585	// driver program.
1586	DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"`
1587	// Output only. If present, the location of miscellaneous control files
1588	// which may be used as part of job setup and handling. If not present,
1589	// control files may be placed in the same location as `driver_output_uri`.
1590	DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"`
1591	// Optional. The labels to associate with this job.
1592	// Label **keys** must contain 1 to 63 characters, and must conform to
1593	// [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
1594	// Label **values** may be empty, but, if present, must contain 1 to 63
1595	// characters, and must conform to [RFC
1596	// 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
1597	// associated with a job.
1598	Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
1599	// Optional. Job scheduling configuration.
1600	Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"`
1601	// Output only. A UUID that uniquely identifies a job within the project
1602	// over time. This is in contrast to a user-settable reference.job_id that
1603	// may be reused over time.
1604	JobUuid              string   `protobuf:"bytes,22,opt,name=job_uuid,json=jobUuid,proto3" json:"job_uuid,omitempty"`
1605	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1606	XXX_unrecognized     []byte   `json:"-"`
1607	XXX_sizecache        int32    `json:"-"`
1608}
1609
1610func (m *Job) Reset()         { *m = Job{} }
1611func (m *Job) String() string { return proto.CompactTextString(m) }
1612func (*Job) ProtoMessage()    {}
1613func (*Job) Descriptor() ([]byte, []int) {
1614	return fileDescriptor_20fb118582e1d7de, []int{13}
1615}
1616
1617func (m *Job) XXX_Unmarshal(b []byte) error {
1618	return xxx_messageInfo_Job.Unmarshal(m, b)
1619}
1620func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1621	return xxx_messageInfo_Job.Marshal(b, m, deterministic)
1622}
1623func (m *Job) XXX_Merge(src proto.Message) {
1624	xxx_messageInfo_Job.Merge(m, src)
1625}
1626func (m *Job) XXX_Size() int {
1627	return xxx_messageInfo_Job.Size(m)
1628}
1629func (m *Job) XXX_DiscardUnknown() {
1630	xxx_messageInfo_Job.DiscardUnknown(m)
1631}
1632
1633var xxx_messageInfo_Job proto.InternalMessageInfo
1634
1635func (m *Job) GetReference() *JobReference {
1636	if m != nil {
1637		return m.Reference
1638	}
1639	return nil
1640}
1641
1642func (m *Job) GetPlacement() *JobPlacement {
1643	if m != nil {
1644		return m.Placement
1645	}
1646	return nil
1647}
1648
1649type isJob_TypeJob interface {
1650	isJob_TypeJob()
1651}
1652
1653type Job_HadoopJob struct {
1654	HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"`
1655}
1656
1657type Job_SparkJob struct {
1658	SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"`
1659}
1660
1661type Job_PysparkJob struct {
1662	PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"`
1663}
1664
1665type Job_HiveJob struct {
1666	HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"`
1667}
1668
1669type Job_PigJob struct {
1670	PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"`
1671}
1672
1673type Job_SparkRJob struct {
1674	SparkRJob *SparkRJob `protobuf:"bytes,21,opt,name=spark_r_job,json=sparkRJob,proto3,oneof"`
1675}
1676
1677type Job_SparkSqlJob struct {
1678	SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"`
1679}
1680
1681func (*Job_HadoopJob) isJob_TypeJob() {}
1682
1683func (*Job_SparkJob) isJob_TypeJob() {}
1684
1685func (*Job_PysparkJob) isJob_TypeJob() {}
1686
1687func (*Job_HiveJob) isJob_TypeJob() {}
1688
1689func (*Job_PigJob) isJob_TypeJob() {}
1690
1691func (*Job_SparkRJob) isJob_TypeJob() {}
1692
1693func (*Job_SparkSqlJob) isJob_TypeJob() {}
1694
1695func (m *Job) GetTypeJob() isJob_TypeJob {
1696	if m != nil {
1697		return m.TypeJob
1698	}
1699	return nil
1700}
1701
1702func (m *Job) GetHadoopJob() *HadoopJob {
1703	if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok {
1704		return x.HadoopJob
1705	}
1706	return nil
1707}
1708
1709func (m *Job) GetSparkJob() *SparkJob {
1710	if x, ok := m.GetTypeJob().(*Job_SparkJob); ok {
1711		return x.SparkJob
1712	}
1713	return nil
1714}
1715
1716func (m *Job) GetPysparkJob() *PySparkJob {
1717	if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok {
1718		return x.PysparkJob
1719	}
1720	return nil
1721}
1722
1723func (m *Job) GetHiveJob() *HiveJob {
1724	if x, ok := m.GetTypeJob().(*Job_HiveJob); ok {
1725		return x.HiveJob
1726	}
1727	return nil
1728}
1729
1730func (m *Job) GetPigJob() *PigJob {
1731	if x, ok := m.GetTypeJob().(*Job_PigJob); ok {
1732		return x.PigJob
1733	}
1734	return nil
1735}
1736
1737func (m *Job) GetSparkRJob() *SparkRJob {
1738	if x, ok := m.GetTypeJob().(*Job_SparkRJob); ok {
1739		return x.SparkRJob
1740	}
1741	return nil
1742}
1743
1744func (m *Job) GetSparkSqlJob() *SparkSqlJob {
1745	if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok {
1746		return x.SparkSqlJob
1747	}
1748	return nil
1749}
1750
1751func (m *Job) GetStatus() *JobStatus {
1752	if m != nil {
1753		return m.Status
1754	}
1755	return nil
1756}
1757
1758func (m *Job) GetStatusHistory() []*JobStatus {
1759	if m != nil {
1760		return m.StatusHistory
1761	}
1762	return nil
1763}
1764
1765func (m *Job) GetYarnApplications() []*YarnApplication {
1766	if m != nil {
1767		return m.YarnApplications
1768	}
1769	return nil
1770}
1771
1772func (m *Job) GetSubmittedBy() string {
1773	if m != nil {
1774		return m.SubmittedBy
1775	}
1776	return ""
1777}
1778
1779func (m *Job) GetDriverOutputResourceUri() string {
1780	if m != nil {
1781		return m.DriverOutputResourceUri
1782	}
1783	return ""
1784}
1785
1786func (m *Job) GetDriverControlFilesUri() string {
1787	if m != nil {
1788		return m.DriverControlFilesUri
1789	}
1790	return ""
1791}
1792
1793func (m *Job) GetLabels() map[string]string {
1794	if m != nil {
1795		return m.Labels
1796	}
1797	return nil
1798}
1799
1800func (m *Job) GetScheduling() *JobScheduling {
1801	if m != nil {
1802		return m.Scheduling
1803	}
1804	return nil
1805}
1806
1807func (m *Job) GetJobUuid() string {
1808	if m != nil {
1809		return m.JobUuid
1810	}
1811	return ""
1812}
1813
1814// XXX_OneofWrappers is for the internal use of the proto package.
1815func (*Job) XXX_OneofWrappers() []interface{} {
1816	return []interface{}{
1817		(*Job_HadoopJob)(nil),
1818		(*Job_SparkJob)(nil),
1819		(*Job_PysparkJob)(nil),
1820		(*Job_HiveJob)(nil),
1821		(*Job_PigJob)(nil),
1822		(*Job_SparkRJob)(nil),
1823		(*Job_SparkSqlJob)(nil),
1824	}
1825}
1826
1827// Job scheduling options.
1828type JobScheduling struct {
1829	// Optional. Maximum number of times per hour a driver may be restarted as
1830	// a result of driver terminating with non-zero code before job is
1831	// reported failed.
1832	//
1833	// A job may be reported as thrashing if driver exits with non-zero code
1834	// 4 times within 10 minute window.
1835	//
1836	// Maximum value is 10.
1837	MaxFailuresPerHour   int32    `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"`
1838	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1839	XXX_unrecognized     []byte   `json:"-"`
1840	XXX_sizecache        int32    `json:"-"`
1841}
1842
1843func (m *JobScheduling) Reset()         { *m = JobScheduling{} }
1844func (m *JobScheduling) String() string { return proto.CompactTextString(m) }
1845func (*JobScheduling) ProtoMessage()    {}
1846func (*JobScheduling) Descriptor() ([]byte, []int) {
1847	return fileDescriptor_20fb118582e1d7de, []int{14}
1848}
1849
1850func (m *JobScheduling) XXX_Unmarshal(b []byte) error {
1851	return xxx_messageInfo_JobScheduling.Unmarshal(m, b)
1852}
1853func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1854	return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic)
1855}
1856func (m *JobScheduling) XXX_Merge(src proto.Message) {
1857	xxx_messageInfo_JobScheduling.Merge(m, src)
1858}
1859func (m *JobScheduling) XXX_Size() int {
1860	return xxx_messageInfo_JobScheduling.Size(m)
1861}
1862func (m *JobScheduling) XXX_DiscardUnknown() {
1863	xxx_messageInfo_JobScheduling.DiscardUnknown(m)
1864}
1865
1866var xxx_messageInfo_JobScheduling proto.InternalMessageInfo
1867
1868func (m *JobScheduling) GetMaxFailuresPerHour() int32 {
1869	if m != nil {
1870		return m.MaxFailuresPerHour
1871	}
1872	return 0
1873}
1874
1875// A request to submit a job.
1876type SubmitJobRequest struct {
1877	// Required. The ID of the Google Cloud Platform project that the job
1878	// belongs to.
1879	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
1880	// Required. The Cloud Dataproc region in which to handle the request.
1881	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
1882	// Required. The job resource.
1883	Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"`
1884	// Optional. A unique id used to identify the request. If the server
1885	// receives two [SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest] requests  with the same
1886	// id, then the second request will be ignored and the
1887	// first [Job][google.cloud.dataproc.v1beta2.Job] created and stored in the backend
1888	// is returned.
1889	//
1890	// It is recommended to always set this value to a
1891	// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
1892	//
1893	// The id must contain only letters (a-z, A-Z), numbers (0-9),
1894	// underscores (_), and hyphens (-). The maximum length is 40 characters.
1895	RequestId            string   `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"`
1896	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1897	XXX_unrecognized     []byte   `json:"-"`
1898	XXX_sizecache        int32    `json:"-"`
1899}
1900
1901func (m *SubmitJobRequest) Reset()         { *m = SubmitJobRequest{} }
1902func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) }
1903func (*SubmitJobRequest) ProtoMessage()    {}
1904func (*SubmitJobRequest) Descriptor() ([]byte, []int) {
1905	return fileDescriptor_20fb118582e1d7de, []int{15}
1906}
1907
1908func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error {
1909	return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b)
1910}
1911func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1912	return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic)
1913}
1914func (m *SubmitJobRequest) XXX_Merge(src proto.Message) {
1915	xxx_messageInfo_SubmitJobRequest.Merge(m, src)
1916}
1917func (m *SubmitJobRequest) XXX_Size() int {
1918	return xxx_messageInfo_SubmitJobRequest.Size(m)
1919}
1920func (m *SubmitJobRequest) XXX_DiscardUnknown() {
1921	xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m)
1922}
1923
1924var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo
1925
1926func (m *SubmitJobRequest) GetProjectId() string {
1927	if m != nil {
1928		return m.ProjectId
1929	}
1930	return ""
1931}
1932
1933func (m *SubmitJobRequest) GetRegion() string {
1934	if m != nil {
1935		return m.Region
1936	}
1937	return ""
1938}
1939
1940func (m *SubmitJobRequest) GetJob() *Job {
1941	if m != nil {
1942		return m.Job
1943	}
1944	return nil
1945}
1946
1947func (m *SubmitJobRequest) GetRequestId() string {
1948	if m != nil {
1949		return m.RequestId
1950	}
1951	return ""
1952}
1953
1954// A request to get the resource representation for a job in a project.
1955type GetJobRequest struct {
1956	// Required. The ID of the Google Cloud Platform project that the job
1957	// belongs to.
1958	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
1959	// Required. The Cloud Dataproc region in which to handle the request.
1960	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
1961	// Required. The job ID.
1962	JobId                string   `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
1963	XXX_NoUnkeyedLiteral struct{} `json:"-"`
1964	XXX_unrecognized     []byte   `json:"-"`
1965	XXX_sizecache        int32    `json:"-"`
1966}
1967
1968func (m *GetJobRequest) Reset()         { *m = GetJobRequest{} }
1969func (m *GetJobRequest) String() string { return proto.CompactTextString(m) }
1970func (*GetJobRequest) ProtoMessage()    {}
1971func (*GetJobRequest) Descriptor() ([]byte, []int) {
1972	return fileDescriptor_20fb118582e1d7de, []int{16}
1973}
1974
1975func (m *GetJobRequest) XXX_Unmarshal(b []byte) error {
1976	return xxx_messageInfo_GetJobRequest.Unmarshal(m, b)
1977}
1978func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
1979	return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic)
1980}
1981func (m *GetJobRequest) XXX_Merge(src proto.Message) {
1982	xxx_messageInfo_GetJobRequest.Merge(m, src)
1983}
1984func (m *GetJobRequest) XXX_Size() int {
1985	return xxx_messageInfo_GetJobRequest.Size(m)
1986}
1987func (m *GetJobRequest) XXX_DiscardUnknown() {
1988	xxx_messageInfo_GetJobRequest.DiscardUnknown(m)
1989}
1990
1991var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo
1992
1993func (m *GetJobRequest) GetProjectId() string {
1994	if m != nil {
1995		return m.ProjectId
1996	}
1997	return ""
1998}
1999
2000func (m *GetJobRequest) GetRegion() string {
2001	if m != nil {
2002		return m.Region
2003	}
2004	return ""
2005}
2006
2007func (m *GetJobRequest) GetJobId() string {
2008	if m != nil {
2009		return m.JobId
2010	}
2011	return ""
2012}
2013
2014// A request to list jobs in a project.
2015type ListJobsRequest struct {
2016	// Required. The ID of the Google Cloud Platform project that the job
2017	// belongs to.
2018	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
2019	// Required. The Cloud Dataproc region in which to handle the request.
2020	Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"`
2021	// Optional. The number of results to return in each response.
2022	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
2023	// Optional. The page token, returned by a previous call, to request the
2024	// next page of results.
2025	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
2026	// Optional. If set, the returned jobs list includes only jobs that were
2027	// submitted to the named cluster.
2028	ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
2029	// Optional. Specifies enumerated categories of jobs to list.
2030	// (default = match ALL jobs).
2031	//
2032	// If `filter` is provided, `jobStateMatcher` will be ignored.
2033	JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1beta2.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"`
2034	// Optional. A filter constraining the jobs to list. Filters are
2035	// case-sensitive and have the following syntax:
2036	//
2037	// [field = value] AND [field [= value]] ...
2038	//
2039	// where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label
2040	// key. **value** can be `*` to match all values.
2041	// `status.state` can be either `ACTIVE` or `NON_ACTIVE`.
2042	// Only the logical `AND` operator is supported; space-separated items are
2043	// treated as having an implicit `AND` operator.
2044	//
2045	// Example filter:
2046	//
2047	// status.state = ACTIVE AND labels.env = staging AND labels.starred = *
2048	Filter               string   `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"`
2049	XXX_NoUnkeyedLiteral struct{} `json:"-"`
2050	XXX_unrecognized     []byte   `json:"-"`
2051	XXX_sizecache        int32    `json:"-"`
2052}
2053
2054func (m *ListJobsRequest) Reset()         { *m = ListJobsRequest{} }
2055func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) }
2056func (*ListJobsRequest) ProtoMessage()    {}
2057func (*ListJobsRequest) Descriptor() ([]byte, []int) {
2058	return fileDescriptor_20fb118582e1d7de, []int{17}
2059}
2060
2061func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error {
2062	return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b)
2063}
2064func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
2065	return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic)
2066}
2067func (m *ListJobsRequest) XXX_Merge(src proto.Message) {
2068	xxx_messageInfo_ListJobsRequest.Merge(m, src)
2069}
2070func (m *ListJobsRequest) XXX_Size() int {
2071	return xxx_messageInfo_ListJobsRequest.Size(m)
2072}
2073func (m *ListJobsRequest) XXX_DiscardUnknown() {
2074	xxx_messageInfo_ListJobsRequest.DiscardUnknown(m)
2075}
2076
2077var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo
2078
2079func (m *ListJobsRequest) GetProjectId() string {
2080	if m != nil {
2081		return m.ProjectId
2082	}
2083	return ""
2084}
2085
2086func (m *ListJobsRequest) GetRegion() string {
2087	if m != nil {
2088		return m.Region
2089	}
2090	return ""
2091}
2092
2093func (m *ListJobsRequest) GetPageSize() int32 {
2094	if m != nil {
2095		return m.PageSize
2096	}
2097	return 0
2098}
2099
2100func (m *ListJobsRequest) GetPageToken() string {
2101	if m != nil {
2102		return m.PageToken
2103	}
2104	return ""
2105}
2106
2107func (m *ListJobsRequest) GetClusterName() string {
2108	if m != nil {
2109		return m.ClusterName
2110	}
2111	return ""
2112}
2113
2114func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher {
2115	if m != nil {
2116		return m.JobStateMatcher
2117	}
2118	return ListJobsRequest_ALL
2119}
2120
2121func (m *ListJobsRequest) GetFilter() string {
2122	if m != nil {
2123		return m.Filter
2124	}
2125	return ""
2126}
2127
2128// A request to update a job.
2129type UpdateJobRequest struct {
2130	// Required. The ID of the Google Cloud Platform project that the job
2131	// belongs to.
2132	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
2133	// Required. The Cloud Dataproc region in which to handle the request.
2134	Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"`
2135	// Required. The job ID.
2136	JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
2137	// Required. The changes to the job.
2138	Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"`
2139	// Required. Specifies the path, relative to <code>Job</code>, of
2140	// the field to update. For example, to update the labels of a Job the
2141	// <code>update_mask</code> parameter would be specified as
2142	// <code>labels</code>, and the `PATCH` request body would specify the new
2143	// value. <strong>Note:</strong> Currently, <code>labels</code> is the only
2144	// field that can be updated.
2145	UpdateMask           *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
2146	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
2147	XXX_unrecognized     []byte                `json:"-"`
2148	XXX_sizecache        int32                 `json:"-"`
2149}
2150
2151func (m *UpdateJobRequest) Reset()         { *m = UpdateJobRequest{} }
2152func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) }
2153func (*UpdateJobRequest) ProtoMessage()    {}
2154func (*UpdateJobRequest) Descriptor() ([]byte, []int) {
2155	return fileDescriptor_20fb118582e1d7de, []int{18}
2156}
2157
2158func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error {
2159	return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b)
2160}
2161func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
2162	return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic)
2163}
2164func (m *UpdateJobRequest) XXX_Merge(src proto.Message) {
2165	xxx_messageInfo_UpdateJobRequest.Merge(m, src)
2166}
2167func (m *UpdateJobRequest) XXX_Size() int {
2168	return xxx_messageInfo_UpdateJobRequest.Size(m)
2169}
2170func (m *UpdateJobRequest) XXX_DiscardUnknown() {
2171	xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m)
2172}
2173
2174var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo
2175
2176func (m *UpdateJobRequest) GetProjectId() string {
2177	if m != nil {
2178		return m.ProjectId
2179	}
2180	return ""
2181}
2182
2183func (m *UpdateJobRequest) GetRegion() string {
2184	if m != nil {
2185		return m.Region
2186	}
2187	return ""
2188}
2189
2190func (m *UpdateJobRequest) GetJobId() string {
2191	if m != nil {
2192		return m.JobId
2193	}
2194	return ""
2195}
2196
2197func (m *UpdateJobRequest) GetJob() *Job {
2198	if m != nil {
2199		return m.Job
2200	}
2201	return nil
2202}
2203
2204func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask {
2205	if m != nil {
2206		return m.UpdateMask
2207	}
2208	return nil
2209}
2210
2211// A list of jobs in a project.
2212type ListJobsResponse struct {
2213	// Output only. Jobs list.
2214	Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"`
2215	// Optional. This token is included in the response if there are more results
2216	// to fetch. To fetch additional results, provide this value as the
2217	// `page_token` in a subsequent <code>ListJobsRequest</code>.
2218	NextPageToken        string   `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
2219	XXX_NoUnkeyedLiteral struct{} `json:"-"`
2220	XXX_unrecognized     []byte   `json:"-"`
2221	XXX_sizecache        int32    `json:"-"`
2222}
2223
2224func (m *ListJobsResponse) Reset()         { *m = ListJobsResponse{} }
2225func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) }
2226func (*ListJobsResponse) ProtoMessage()    {}
2227func (*ListJobsResponse) Descriptor() ([]byte, []int) {
2228	return fileDescriptor_20fb118582e1d7de, []int{19}
2229}
2230
2231func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error {
2232	return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b)
2233}
2234func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
2235	return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic)
2236}
2237func (m *ListJobsResponse) XXX_Merge(src proto.Message) {
2238	xxx_messageInfo_ListJobsResponse.Merge(m, src)
2239}
2240func (m *ListJobsResponse) XXX_Size() int {
2241	return xxx_messageInfo_ListJobsResponse.Size(m)
2242}
2243func (m *ListJobsResponse) XXX_DiscardUnknown() {
2244	xxx_messageInfo_ListJobsResponse.DiscardUnknown(m)
2245}
2246
2247var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo
2248
2249func (m *ListJobsResponse) GetJobs() []*Job {
2250	if m != nil {
2251		return m.Jobs
2252	}
2253	return nil
2254}
2255
2256func (m *ListJobsResponse) GetNextPageToken() string {
2257	if m != nil {
2258		return m.NextPageToken
2259	}
2260	return ""
2261}
2262
2263// A request to cancel a job.
2264type CancelJobRequest struct {
2265	// Required. The ID of the Google Cloud Platform project that the job
2266	// belongs to.
2267	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
2268	// Required. The Cloud Dataproc region in which to handle the request.
2269	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
2270	// Required. The job ID.
2271	JobId                string   `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
2272	XXX_NoUnkeyedLiteral struct{} `json:"-"`
2273	XXX_unrecognized     []byte   `json:"-"`
2274	XXX_sizecache        int32    `json:"-"`
2275}
2276
2277func (m *CancelJobRequest) Reset()         { *m = CancelJobRequest{} }
2278func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) }
2279func (*CancelJobRequest) ProtoMessage()    {}
2280func (*CancelJobRequest) Descriptor() ([]byte, []int) {
2281	return fileDescriptor_20fb118582e1d7de, []int{20}
2282}
2283
2284func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error {
2285	return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b)
2286}
2287func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
2288	return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic)
2289}
2290func (m *CancelJobRequest) XXX_Merge(src proto.Message) {
2291	xxx_messageInfo_CancelJobRequest.Merge(m, src)
2292}
2293func (m *CancelJobRequest) XXX_Size() int {
2294	return xxx_messageInfo_CancelJobRequest.Size(m)
2295}
2296func (m *CancelJobRequest) XXX_DiscardUnknown() {
2297	xxx_messageInfo_CancelJobRequest.DiscardUnknown(m)
2298}
2299
2300var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo
2301
2302func (m *CancelJobRequest) GetProjectId() string {
2303	if m != nil {
2304		return m.ProjectId
2305	}
2306	return ""
2307}
2308
2309func (m *CancelJobRequest) GetRegion() string {
2310	if m != nil {
2311		return m.Region
2312	}
2313	return ""
2314}
2315
2316func (m *CancelJobRequest) GetJobId() string {
2317	if m != nil {
2318		return m.JobId
2319	}
2320	return ""
2321}
2322
2323// A request to delete a job.
2324type DeleteJobRequest struct {
2325	// Required. The ID of the Google Cloud Platform project that the job
2326	// belongs to.
2327	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
2328	// Required. The Cloud Dataproc region in which to handle the request.
2329	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
2330	// Required. The job ID.
2331	JobId                string   `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
2332	XXX_NoUnkeyedLiteral struct{} `json:"-"`
2333	XXX_unrecognized     []byte   `json:"-"`
2334	XXX_sizecache        int32    `json:"-"`
2335}
2336
2337func (m *DeleteJobRequest) Reset()         { *m = DeleteJobRequest{} }
2338func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) }
2339func (*DeleteJobRequest) ProtoMessage()    {}
2340func (*DeleteJobRequest) Descriptor() ([]byte, []int) {
2341	return fileDescriptor_20fb118582e1d7de, []int{21}
2342}
2343
2344func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error {
2345	return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b)
2346}
2347func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
2348	return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic)
2349}
2350func (m *DeleteJobRequest) XXX_Merge(src proto.Message) {
2351	xxx_messageInfo_DeleteJobRequest.Merge(m, src)
2352}
2353func (m *DeleteJobRequest) XXX_Size() int {
2354	return xxx_messageInfo_DeleteJobRequest.Size(m)
2355}
2356func (m *DeleteJobRequest) XXX_DiscardUnknown() {
2357	xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m)
2358}
2359
2360var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo
2361
2362func (m *DeleteJobRequest) GetProjectId() string {
2363	if m != nil {
2364		return m.ProjectId
2365	}
2366	return ""
2367}
2368
2369func (m *DeleteJobRequest) GetRegion() string {
2370	if m != nil {
2371		return m.Region
2372	}
2373	return ""
2374}
2375
2376func (m *DeleteJobRequest) GetJobId() string {
2377	if m != nil {
2378		return m.JobId
2379	}
2380	return ""
2381}
2382
2383func init() {
2384	proto.RegisterEnum("google.cloud.dataproc.v1beta2.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value)
2385	proto.RegisterEnum("google.cloud.dataproc.v1beta2.JobStatus_State", JobStatus_State_name, JobStatus_State_value)
2386	proto.RegisterEnum("google.cloud.dataproc.v1beta2.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value)
2387	proto.RegisterEnum("google.cloud.dataproc.v1beta2.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value)
2388	proto.RegisterEnum("google.cloud.dataproc.v1beta2.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value)
2389	proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1beta2.LoggingConfig")
2390	proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry")
2391	proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1beta2.HadoopJob")
2392	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry")
2393	proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1beta2.SparkJob")
2394	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry")
2395	proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1beta2.PySparkJob")
2396	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry")
2397	proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1beta2.QueryList")
2398	proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1beta2.HiveJob")
2399	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry")
2400	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry")
2401	proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob")
2402	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry")
2403	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry")
2404	proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1beta2.PigJob")
2405	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry")
2406	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry")
2407	proto.RegisterType((*SparkRJob)(nil), "google.cloud.dataproc.v1beta2.SparkRJob")
2408	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry")
2409	proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1beta2.JobPlacement")
2410	proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1beta2.JobStatus")
2411	proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1beta2.JobReference")
2412	proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1beta2.YarnApplication")
2413	proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1beta2.Job")
2414	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.Job.LabelsEntry")
2415	proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1beta2.JobScheduling")
2416	proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1beta2.SubmitJobRequest")
2417	proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1beta2.GetJobRequest")
2418	proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1beta2.ListJobsRequest")
2419	proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1beta2.UpdateJobRequest")
2420	proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1beta2.ListJobsResponse")
2421	proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1beta2.CancelJobRequest")
2422	proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1beta2.DeleteJobRequest")
2423}
2424
2425func init() {
2426	proto.RegisterFile("google/cloud/dataproc/v1beta2/jobs.proto", fileDescriptor_20fb118582e1d7de)
2427}
2428
2429var fileDescriptor_20fb118582e1d7de = []byte{
2430	// 2407 bytes of a gzipped FileDescriptorProto
2431	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x5a, 0xcf, 0x73, 0xdb, 0xc6,
2432	0xf5, 0x17, 0x7f, 0x13, 0x8f, 0xfa, 0x01, 0x6d, 0xec, 0x7c, 0xf9, 0x65, 0x9a, 0x89, 0x82, 0x99,
2433	0xb8, 0xaa, 0xd3, 0x21, 0xc7, 0xac, 0xeb, 0x38, 0x76, 0xd3, 0x98, 0x22, 0x41, 0x93, 0x2a, 0x4d,
2434	0x31, 0x20, 0x69, 0x37, 0xe9, 0x74, 0x10, 0x90, 0x5c, 0x51, 0xa0, 0x40, 0x00, 0x5e, 0x00, 0xaa,
2435	0x19, 0x8f, 0x2f, 0xbd, 0xf4, 0xd0, 0x63, 0x8f, 0x9d, 0xc9, 0x4c, 0x7b, 0x6a, 0x66, 0x7a, 0xe9,
2436	0xb5, 0xff, 0x40, 0xa7, 0x97, 0xf6, 0x90, 0x3f, 0xa1, 0x3d, 0xf4, 0xd8, 0x43, 0xa7, 0xe7, 0xce,
2437	0xee, 0x02, 0x10, 0x49, 0xc9, 0x26, 0x64, 0xb5, 0x4d, 0x9d, 0x93, 0x80, 0xf7, 0x6b, 0xdf, 0xee,
2438	0xe7, 0xb3, 0x6f, 0xdf, 0x82, 0x82, 0xdd, 0xb1, 0x65, 0x8d, 0x0d, 0x5c, 0x1a, 0x1a, 0x96, 0x37,
2439	0x2a, 0x8d, 0x34, 0x57, 0xb3, 0x89, 0x35, 0x2c, 0x9d, 0xdc, 0x18, 0x60, 0x57, 0x2b, 0x97, 0x26,
2440	0xd6, 0xc0, 0x29, 0xda, 0xc4, 0x72, 0x2d, 0xf4, 0x26, 0xb7, 0x2c, 0x32, 0xcb, 0x62, 0x60, 0x59,
2441	0xf4, 0x2d, 0x0b, 0xdf, 0xf0, 0x03, 0x69, 0xb6, 0x5e, 0xd2, 0x4c, 0xd3, 0x72, 0x35, 0x57, 0xb7,
2442	0x4c, 0xdf, 0xb9, 0xf0, 0x86, 0xaf, 0x65, 0x6f, 0x03, 0xef, 0xb0, 0x84, 0xa7, 0xb6, 0x3b, 0xf3,
2443	0x95, 0x3b, 0xcb, 0xca, 0x43, 0x1d, 0x1b, 0x23, 0x75, 0xaa, 0x39, 0xc7, 0xbe, 0xc5, 0x5b, 0xcb,
2444	0x16, 0xae, 0x3e, 0xc5, 0x8e, 0xab, 0x4d, 0x6d, 0x6e, 0x20, 0xfd, 0x2d, 0x0e, 0x1b, 0x2d, 0x6b,
2445	0x3c, 0xd6, 0xcd, 0x71, 0xd5, 0x32, 0x0f, 0xf5, 0x31, 0x9a, 0xc2, 0xf6, 0x88, 0xe8, 0x27, 0x98,
2446	0xa8, 0x86, 0x35, 0x56, 0x0d, 0x7c, 0x82, 0x0d, 0x27, 0x1f, 0xdf, 0x49, 0xec, 0xe6, 0xca, 0x95,
2447	0xe2, 0x0b, 0xa7, 0x52, 0x5c, 0x08, 0x54, 0xac, 0xb1, 0x28, 0x2d, 0x6b, 0xdc, 0x62, 0x31, 0x64,
2448	0xd3, 0x25, 0x33, 0x65, 0x6b, 0xb4, 0x28, 0x2d, 0x9c, 0xc0, 0x95, 0xf3, 0x0c, 0x91, 0x08, 0x89,
2449	0x63, 0x3c, 0xcb, 0xc7, 0x76, 0x62, 0xbb, 0x82, 0x42, 0x1f, 0x51, 0x03, 0x52, 0x27, 0x9a, 0xe1,
2450	0xe1, 0x7c, 0x7c, 0x27, 0xb6, 0xbb, 0x59, 0x2e, 0x5f, 0x28, 0x19, 0x16, 0x5a, 0xe1, 0x01, 0xee,
2451	0xc4, 0x6f, 0xc7, 0x24, 0x1b, 0x52, 0x4c, 0x86, 0xae, 0xc2, 0x76, 0x4b, 0x7e, 0x28, 0xb7, 0xd4,
2452	0x7e, 0xbb, 0xdb, 0x91, 0xab, 0xcd, 0x7a, 0x53, 0xae, 0x89, 0x6b, 0x28, 0x03, 0x89, 0x4a, 0xab,
2453	0x25, 0xc6, 0x90, 0x00, 0xa9, 0x9e, 0x52, 0xa9, 0xca, 0x62, 0x9c, 0x3e, 0xd6, 0xe4, 0xbd, 0xfe,
2454	0x7d, 0x31, 0x81, 0xb2, 0x90, 0x6c, 0xb6, 0xeb, 0x07, 0x62, 0x92, 0x3e, 0x3d, 0xaa, 0x28, 0x6d,
2455	0x31, 0x45, 0xd5, 0xb2, 0xa2, 0x1c, 0x28, 0x62, 0x9a, 0x3e, 0xd6, 0x2b, 0xbd, 0x4a, 0x4b, 0xcc,
2456	0xd0, 0x40, 0x07, 0xf5, 0xba, 0x98, 0x95, 0xfe, 0x98, 0x00, 0xa1, 0xa1, 0x8d, 0x2c, 0xcb, 0xde,
2457	0xb7, 0x06, 0xe8, 0x5d, 0xd8, 0x9e, 0x6a, 0xba, 0xa9, 0x4e, 0x34, 0xa2, 0x1e, 0xea, 0x06, 0x56,
2458	0x3d, 0xa2, 0xf3, 0xd9, 0x36, 0xd6, 0x94, 0x4d, 0xaa, 0xda, 0xd7, 0x48, 0x5d, 0x37, 0x70, 0x9f,
2459	0xe8, 0xe8, 0x2d, 0x00, 0x66, 0x3c, 0x34, 0x34, 0xc7, 0x61, 0xf3, 0xa7, 0x56, 0x02, 0x95, 0x55,
2460	0xa9, 0x08, 0x21, 0x48, 0x6a, 0x64, 0xec, 0xe4, 0x13, 0x3b, 0x89, 0x5d, 0x41, 0x61, 0xcf, 0x48,
2461	0x82, 0x8d, 0xf9, 0xe0, 0x4e, 0x3e, 0xc9, 0x94, 0xb9, 0x49, 0x18, 0xd7, 0x41, 0x6f, 0x80, 0x70,
2462	0xaa, 0x4f, 0x31, 0x7d, 0xf6, 0x30, 0x50, 0xbe, 0x0d, 0xeb, 0x1a, 0x19, 0x1e, 0xe9, 0x27, 0xbe,
2463	0x3e, 0xcd, 0xfd, 0x7d, 0x19, 0x33, 0xf9, 0x21, 0x80, 0x4d, 0x2c, 0x1b, 0x13, 0x57, 0xc7, 0x4e,
2464	0x3e, 0xc3, 0x58, 0x72, 0x7b, 0x05, 0x30, 0xe1, 0x1a, 0x14, 0x3b, 0xa1, 0x2b, 0x27, 0xc7, 0x5c,
2465	0x2c, 0xd4, 0x85, 0x4d, 0x83, 0x23, 0xa8, 0x0e, 0x19, 0x84, 0xf9, 0xec, 0x4e, 0x6c, 0x37, 0x57,
2466	0xfe, 0xf6, 0x45, 0x60, 0x57, 0x36, 0x8c, 0xf9, 0xd7, 0xc2, 0x07, 0xb0, 0xb5, 0x34, 0xe6, 0x39,
2467	0x3c, 0xbb, 0x32, 0xcf, 0x33, 0x61, 0x8e, 0x33, 0x7b, 0x59, 0x48, 0x73, 0xfa, 0x4a, 0x7f, 0x48,
2468	0x40, 0xb6, 0x6b, 0x6b, 0xe4, 0xf8, 0xeb, 0x03, 0xe5, 0xa3, 0x73, 0xa0, 0x7c, 0x6f, 0xc5, 0x62,
2469	0x07, 0x4b, 0xf0, 0x0a, 0x23, 0xf9, 0xa7, 0x04, 0x40, 0x67, 0x16, 0x62, 0x59, 0x82, 0x2b, 0x0c,
2470	0x1e, 0x7b, 0xe6, 0x1e, 0x59, 0xe6, 0x12, 0x9c, 0x0a, 0xc3, 0xb9, 0xc3, 0x54, 0x01, 0x9e, 0x01,
2471	0x5c, 0xf1, 0x39, 0xb8, 0x76, 0x41, 0x5c, 0xf2, 0x0f, 0xe0, 0xdc, 0xb4, 0xe7, 0x9d, 0xff, 0x3b,
2472	0xc0, 0x7e, 0x7c, 0x0e, 0xb0, 0xef, 0xaf, 0x58, 0xfb, 0xd3, 0x15, 0x79, 0xd5, 0xa0, 0x95, 0xde,
2473	0x01, 0xe1, 0x23, 0x0f, 0x93, 0x59, 0x4b, 0x77, 0x5c, 0x94, 0x87, 0xcc, 0x63, 0x0f, 0x13, 0x3a,
2474	0xf1, 0x18, 0x5b, 0x99, 0xe0, 0x55, 0xfa, 0x3c, 0x09, 0x99, 0x86, 0x7e, 0x82, 0x29, 0xe8, 0xd7,
2475	0x60, 0x93, 0x8a, 0x67, 0x67, 0x77, 0xef, 0x3a, 0x93, 0x07, 0x58, 0x37, 0x01, 0xb8, 0x9d, 0xa1,
2476	0x3b, 0x2e, 0x1b, 0x39, 0x57, 0xde, 0x5d, 0x31, 0xd5, 0x30, 0x17, 0xba, 0xcb, 0x1f, 0x87, 0x89,
2477	0x15, 0xe1, 0xb5, 0xa1, 0x65, 0xba, 0xba, 0xe9, 0x61, 0x95, 0xf2, 0x44, 0xd3, 0x0d, 0x8f, 0xe0,
2478	0x7c, 0x62, 0x27, 0xb6, 0x9b, 0x55, 0xb6, 0x03, 0xd5, 0x81, 0x59, 0xe7, 0x0a, 0x74, 0x08, 0xa2,
2479	0x33, 0x24, 0xba, 0xed, 0xaa, 0x27, 0x1a, 0xd1, 0xb5, 0x81, 0x81, 0x39, 0x57, 0x72, 0xe5, 0xbb,
2480	0xab, 0xca, 0x2d, 0x9f, 0x64, 0xb1, 0xcb, 0xdc, 0x1f, 0x06, 0xde, 0xfe, 0x71, 0xec, 0x2c, 0x4a,
2481	0xd1, 0xc3, 0x05, 0xb2, 0xa4, 0xd8, 0x08, 0xb7, 0x22, 0x8e, 0xf0, 0x22, 0xa6, 0x9c, 0x21, 0x7a,
2482	0xfa, 0x0c, 0xd1, 0x0b, 0x7b, 0x70, 0xe5, 0xbc, 0x24, 0x2f, 0x82, 0xfe, 0x65, 0xeb, 0x82, 0x10,
2483	0xf2, 0x45, 0xfa, 0x73, 0x12, 0x72, 0x6c, 0x13, 0x74, 0x1f, 0x1b, 0x5f, 0x11, 0x49, 0x26, 0xe7,
2484	0x80, 0x9e, 0x60, 0x90, 0x7c, 0x18, 0xa5, 0x30, 0xf3, 0xc4, 0x23, 0x02, 0xff, 0xc9, 0x02, 0xf0,
2485	0x9c, 0x5a, 0x77, 0x2e, 0x30, 0xca, 0x85, 0xc0, 0xbf, 0x7d, 0xb6, 0xca, 0x9d, 0x2d, 0x25, 0xe9,
2486	0xcb, 0x97, 0x92, 0xff, 0x2d, 0x46, 0xfd, 0x3d, 0x09, 0xe9, 0x8e, 0x3e, 0x7e, 0x45, 0x2a, 0x0e,
2487	0x7e, 0x6e, 0xc5, 0x59, 0x45, 0x0b, 0x3e, 0xc7, 0x88, 0xbc, 0xeb, 0x9f, 0x53, 0x70, 0xbe, 0x1b,
2488	0x6d, 0x80, 0x4b, 0xd6, 0x9b, 0x73, 0x28, 0x97, 0xf9, 0xba, 0x51, 0xee, 0x1f, 0x71, 0x10, 0xd8,
2489	0x2e, 0x55, 0x28, 0xeb, 0xde, 0x81, 0x2d, 0xd6, 0xdc, 0x2c, 0xb7, 0xa9, 0xca, 0x3a, 0x15, 0x2b,
2490	0x2f, 0x6a, 0x69, 0x16, 0x9a, 0x90, 0xc4, 0x8a, 0x26, 0x24, 0xb9, 0xea, 0xa2, 0x90, 0x8a, 0x74,
2491	0x51, 0x08, 0x13, 0xbf, 0x60, 0x0f, 0x92, 0xfe, 0xca, 0x7b, 0x90, 0x1e, 0xac, 0xef, 0x5b, 0x83,
2492	0x8e, 0xa1, 0x0d, 0xf1, 0x14, 0x9b, 0x2e, 0x5d, 0xa0, 0xa1, 0xe1, 0x39, 0x2e, 0x26, 0xaa, 0xa9,
2493	0x4d, 0xb1, 0x1f, 0x24, 0xe7, 0xcb, 0xda, 0xda, 0x14, 0xcf, 0x9b, 0x78, 0x9e, 0x3e, 0xf2, 0x63,
2494	0x06, 0x26, 0x7d, 0x4f, 0x1f, 0x49, 0xff, 0x4c, 0x80, 0xb0, 0x6f, 0x0d, 0xba, 0xae, 0xe6, 0x7a,
2495	0x0e, 0xaa, 0x41, 0xca, 0x71, 0x35, 0x97, 0x07, 0xdb, 0x2c, 0x17, 0x57, 0x4c, 0x37, 0x74, 0x2c,
2496	0xd2, 0x3f, 0x58, 0xe1, 0xce, 0xb4, 0x41, 0x1a, 0x61, 0x57, 0xd3, 0x0d, 0xff, 0x2e, 0xa2, 0x04,
2497	0xaf, 0xa8, 0x06, 0x22, 0x33, 0x51, 0x1d, 0x57, 0x23, 0xae, 0xea, 0xea, 0x53, 0xec, 0xaf, 0x6c,
2498	0x21, 0x18, 0x2a, 0xf8, 0xaa, 0x50, 0xec, 0x05, 0x5f, 0x15, 0x94, 0x4d, 0xe6, 0xd3, 0xa5, 0x2e,
2499	0x54, 0x88, 0x1e, 0x40, 0xd6, 0xf1, 0x06, 0x3c, 0xd1, 0x0c, 0x4b, 0xf4, 0x46, 0xf4, 0x44, 0x7d,
2500	0x47, 0x25, 0x0c, 0x21, 0x7d, 0x11, 0x83, 0x14, 0xcb, 0x9f, 0x5e, 0xdb, 0xbb, 0xbd, 0x4a, 0x4f,
2501	0x5e, 0xba, 0xb6, 0xe7, 0x20, 0xd3, 0x91, 0xdb, 0xb5, 0x66, 0xfb, 0xbe, 0x18, 0x43, 0x9b, 0x00,
2502	0x5d, 0xb9, 0xd7, 0xef, 0xa8, 0xb5, 0x83, 0xb6, 0x2c, 0x66, 0xa9, 0x52, 0xe9, 0xb7, 0xdb, 0x54,
2503	0x19, 0x47, 0x08, 0x36, 0xab, 0x95, 0x76, 0x55, 0x6e, 0xa9, 0x81, 0x43, 0x62, 0x4e, 0xd6, 0xed,
2504	0x55, 0x94, 0x9e, 0x5c, 0x13, 0x33, 0x68, 0x03, 0x04, 0x2e, 0x6b, 0xc9, 0x35, 0x7e, 0xdd, 0x67,
2505	0xd1, 0x16, 0xae, 0xfb, 0xaf, 0xc1, 0x56, 0xa5, 0xd7, 0x93, 0x1f, 0x74, 0x7a, 0x6a, 0xbd, 0xd2,
2506	0x6c, 0xf5, 0x15, 0x59, 0x14, 0xa4, 0x06, 0x64, 0x83, 0x19, 0xa0, 0x2d, 0xc8, 0x2d, 0xe6, 0xb9,
2507	0x01, 0x42, 0xb7, 0xbf, 0xf7, 0xa0, 0xd9, 0xa3, 0x83, 0xc4, 0x10, 0x40, 0xfa, 0xa3, 0xbe, 0xdc,
2508	0x97, 0x6b, 0x62, 0x1c, 0x89, 0xb0, 0xde, 0xed, 0x55, 0x5a, 0x32, 0xcd, 0xa1, 0xd7, 0xef, 0x8a,
2509	0x09, 0xa9, 0xc6, 0xe8, 0xa4, 0xe0, 0x43, 0x4c, 0xb0, 0x39, 0xc4, 0xe8, 0x4d, 0xb6, 0x99, 0x26,
2510	0x78, 0xe8, 0xaa, 0xfa, 0xc8, 0x27, 0x93, 0xe0, 0x4b, 0x9a, 0x23, 0x74, 0x15, 0xd2, 0x13, 0x6b,
2511	0xa0, 0x86, 0x24, 0x4a, 0x4d, 0xac, 0x41, 0x73, 0x24, 0xfd, 0x3e, 0x0e, 0x5b, 0x1f, 0x6b, 0xc4,
2512	0xac, 0xd8, 0xb6, 0xa1, 0x0f, 0xd9, 0x57, 0x26, 0xba, 0xd5, 0xe7, 0x08, 0xc9, 0x9e, 0xd1, 0x7e,
2513	0x40, 0x2c, 0xfe, 0x9d, 0xe5, 0xe6, 0x0a, 0xbc, 0x96, 0x42, 0x2e, 0xd2, 0xab, 0x00, 0x59, 0x9b,
2514	0x58, 0x63, 0x82, 0x1d, 0x87, 0x9d, 0x34, 0x71, 0x25, 0x7c, 0xa7, 0x8c, 0x77, 0x89, 0x36, 0x3c,
2515	0xa6, 0x3b, 0xd7, 0x23, 0x46, 0x3e, 0xc9, 0x19, 0x1f, 0xc8, 0xfa, 0xc4, 0x90, 0x7e, 0xb6, 0x0a,
2516	0xee, 0x0c, 0x24, 0xda, 0xf2, 0x23, 0x0e, 0x75, 0x5b, 0x7e, 0xa4, 0x76, 0x2b, 0x0f, 0x39, 0xba,
2517	0x0b, 0xeb, 0x9b, 0x40, 0xeb, 0x90, 0xad, 0x54, 0xab, 0x72, 0xa7, 0xc7, 0x30, 0x9c, 0xe3, 0x41,
2518	0x8a, 0xaa, 0xea, 0xcd, 0x76, 0xb3, 0xdb, 0x90, 0x6b, 0x62, 0x9a, 0x02, 0x41, 0x11, 0x64, 0xc8,
2519	0x03, 0xa4, 0x7f, 0xd0, 0x64, 0xb0, 0x67, 0xa5, 0x5f, 0x03, 0x24, 0x68, 0x09, 0x6d, 0x82, 0x40,
2520	0x02, 0x1c, 0xd8, 0xaa, 0xe5, 0xca, 0xef, 0xae, 0x26, 0x74, 0x08, 0x9d, 0x72, 0xea, 0x4d, 0x43,
2521	0xd9, 0x41, 0x85, 0xf0, 0x8f, 0xf6, 0x08, 0xa1, 0xc2, 0xa2, 0xa2, 0x9c, 0x7a, 0xd3, 0x36, 0xe1,
2522	0x88, 0x7d, 0x55, 0x51, 0x27, 0xd6, 0x80, 0x2d, 0xf4, 0xea, 0x36, 0x21, 0xfc, 0x0c, 0x43, 0xdb,
2523	0x84, 0xa3, 0xf0, 0xbb, 0x54, 0x1d, 0x04, 0x87, 0xd6, 0x5d, 0x16, 0x29, 0xc9, 0x22, 0x7d, 0x33,
2524	0xe2, 0x57, 0x80, 0xc6, 0x9a, 0x92, 0x75, 0x82, 0x8b, 0x74, 0x0b, 0x72, 0xf6, 0xec, 0x34, 0x52,
2525	0x8a, 0x45, 0xfa, 0x56, 0xe4, 0x6b, 0x67, 0x63, 0x4d, 0x01, 0xdf, 0x9f, 0x46, 0xab, 0x42, 0x96,
2526	0x1d, 0x2f, 0x34, 0x14, 0x2f, 0x42, 0xd7, 0xa2, 0x5d, 0x4a, 0x1a, 0x6b, 0x4a, 0xe6, 0xc8, 0xbf,
2527	0xe6, 0xdd, 0x83, 0x8c, 0xad, 0x8f, 0x59, 0x0c, 0x7e, 0xd0, 0xbf, 0x13, 0xa9, 0xcf, 0x68, 0xac,
2528	0x29, 0x69, 0x9b, 0xb7, 0x6d, 0xfb, 0x90, 0xe3, 0x53, 0x22, 0x2c, 0xca, 0xd5, 0x48, 0x0b, 0x1d,
2529	0x1e, 0x63, 0x74, 0xa1, 0x9d, 0xf0, 0x30, 0xee, 0xc0, 0x06, 0x8f, 0xe5, 0x3c, 0x36, 0x58, 0xb4,
2530	0x75, 0x16, 0xed, 0x7a, 0xf4, 0x9e, 0xbb, 0xb1, 0xa6, 0xf0, 0x74, 0xfc, 0x1b, 0xca, 0x3d, 0x48,
2531	0x3b, 0xac, 0x72, 0xfa, 0xb7, 0xf0, 0xdd, 0xa8, 0x95, 0x56, 0xf1, 0xfd, 0xd0, 0x01, 0x6c, 0xf2,
2532	0x27, 0xf5, 0x48, 0x77, 0x5c, 0x8b, 0xcc, 0xf2, 0x1b, 0xec, 0xa4, 0x8e, 0x1e, 0x69, 0x83, 0xfb,
2533	0x37, 0xb8, 0x3b, 0xfa, 0x11, 0x6c, 0xcf, 0x34, 0x62, 0xaa, 0xda, 0x69, 0x81, 0x70, 0xf2, 0x02,
2534	0x8b, 0x59, 0xbc, 0x58, 0x5d, 0x51, 0xc4, 0xd9, 0xa2, 0x80, 0x15, 0x10, 0xc7, 0x1b, 0x4c, 0x75,
2535	0xd7, 0xc5, 0x23, 0x75, 0x30, 0xcb, 0x03, 0x2f, 0x20, 0xa1, 0x6c, 0x6f, 0x86, 0xee, 0x42, 0xc1,
2536	0xff, 0x98, 0x6d, 0x79, 0xae, 0xed, 0xb9, 0x2a, 0xc1, 0x8e, 0xe5, 0x91, 0x21, 0x6f, 0x7e, 0xb6,
2537	0x99, 0xc3, 0xff, 0x71, 0x8b, 0x03, 0x66, 0xa0, 0xf8, 0x7a, 0xda, 0x07, 0xbd, 0x07, 0x79, 0xdf,
2538	0x99, 0x76, 0xc7, 0xc4, 0x32, 0x58, 0xdb, 0xe4, 0x30, 0xd7, 0x2d, 0xe6, 0x7a, 0x95, 0xeb, 0xab,
2539	0x5c, 0x4d, 0xfb, 0x27, 0x87, 0x3a, 0xd6, 0x21, 0x6d, 0x68, 0x03, 0x6c, 0x38, 0x79, 0x14, 0x69,
2540	0xaa, 0xb4, 0xc5, 0x69, 0x31, 0x07, 0xde, 0xde, 0xf8, 0xde, 0xa8, 0x05, 0xe0, 0x0c, 0x8f, 0xf0,
2541	0xc8, 0x33, 0x74, 0x73, 0x9c, 0xbf, 0x12, 0xa9, 0xad, 0xa1, 0x50, 0x84, 0x3e, 0xca, 0x9c, 0x3f,
2542	0xfa, 0x7f, 0xc8, 0xd2, 0x63, 0x81, 0x75, 0x17, 0xaf, 0xf3, 0xb3, 0x7e, 0x62, 0x0d, 0x68, 0x67,
2543	0x51, 0x78, 0x1f, 0x72, 0x73, 0xe3, 0x5f, 0xa8, 0xd9, 0x04, 0xc8, 0xba, 0x33, 0x9b, 0xed, 0x4c,
2544	0x69, 0x0f, 0x36, 0x16, 0x86, 0x47, 0x37, 0xe0, 0xea, 0x54, 0x7b, 0x12, 0xdc, 0x35, 0x1c, 0xd5,
2545	0xc6, 0x44, 0x3d, 0xb2, 0x3c, 0xc2, 0x42, 0xa7, 0x14, 0x34, 0xd5, 0x9e, 0xf8, 0xd7, 0x0d, 0xa7,
2546	0x83, 0x49, 0xc3, 0xf2, 0x88, 0xf4, 0x79, 0x0c, 0xc4, 0x2e, 0x43, 0x90, 0xd5, 0xcd, 0xc7, 0x1e,
2547	0x76, 0xdc, 0x55, 0x07, 0xde, 0xeb, 0x90, 0x26, 0x78, 0xac, 0x5b, 0x26, 0x2b, 0x7d, 0x82, 0xe2,
2548	0xbf, 0xa1, 0x9b, 0x90, 0xa0, 0x1b, 0x8b, 0xd7, 0x56, 0x29, 0x42, 0x99, 0xa6, 0xe6, 0x74, 0x30,
2549	0xc2, 0xc7, 0xa5, 0x83, 0xf1, 0x53, 0x49, 0xf0, 0x25, 0xcd, 0x91, 0xf4, 0x63, 0xd8, 0xb8, 0x8f,
2550	0xff, 0x0d, 0xc9, 0x3d, 0xe7, 0x94, 0xfe, 0x4b, 0x1c, 0xb6, 0xe8, 0x7d, 0x6d, 0xdf, 0x1a, 0x38,
2551	0x17, 0x1e, 0x21, 0xbd, 0x30, 0xc2, 0x1b, 0x20, 0xd8, 0xda, 0x18, 0xab, 0x8e, 0xfe, 0x19, 0x07,
2552	0x2e, 0xa5, 0x64, 0xa9, 0xa0, 0xab, 0x7f, 0xc6, 0x7b, 0x08, 0xaa, 0x74, 0xad, 0x63, 0x1c, 0xa4,
2553	0xc6, 0xcc, 0x7b, 0x54, 0x70, 0xa6, 0x63, 0x4d, 0x9e, 0xed, 0x58, 0x27, 0xb0, 0x4d, 0x27, 0xc0,
2554	0x9b, 0xc4, 0xa9, 0xe6, 0x0e, 0x8f, 0x30, 0x61, 0x75, 0x7e, 0xb3, 0xfc, 0xfd, 0x55, 0xbd, 0xf7,
2555	0xe2, 0x04, 0x83, 0xfa, 0x81, 0x1f, 0xf0, 0x28, 0xca, 0xd6, 0x64, 0x51, 0x40, 0xa7, 0x78, 0xa8,
2556	0x1b, 0x2e, 0x26, 0xac, 0x72, 0x0b, 0x8a, 0xff, 0x26, 0xdd, 0x82, 0xad, 0x25, 0xdf, 0xe0, 0x87,
2557	0x9b, 0x35, 0x7a, 0x7c, 0x57, 0xaa, 0xbd, 0xe6, 0x43, 0xd9, 0x6f, 0x0f, 0x0e, 0xda, 0xaa, 0xff,
2558	0x1e, 0x97, 0xbe, 0x8c, 0x81, 0xd8, 0xb7, 0x47, 0x9a, 0x8b, 0x5f, 0x06, 0xc8, 0xf8, 0x73, 0x80,
2559	0x4c, 0xcc, 0x01, 0x19, 0x90, 0x2f, 0x79, 0x31, 0xf2, 0xdd, 0x85, 0x9c, 0xc7, 0xf2, 0x62, 0xbf,
2560	0xe2, 0xf9, 0xc7, 0xe6, 0xd9, 0x86, 0xbb, 0xae, 0x63, 0x63, 0xf4, 0x40, 0x73, 0x8e, 0x15, 0xe0,
2561	0xe6, 0xf4, 0x59, 0x22, 0x20, 0x9e, 0xae, 0xac, 0x63, 0x5b, 0xa6, 0x83, 0xd1, 0x2d, 0x48, 0x4e,
2562	0xac, 0x01, 0xff, 0xfc, 0x19, 0x2d, 0x0f, 0x66, 0x8f, 0xae, 0xc1, 0x96, 0x89, 0x9f, 0xb8, 0xea,
2563	0x1c, 0x49, 0xf8, 0xb4, 0x37, 0xa8, 0xb8, 0x13, 0x10, 0x45, 0xfa, 0x14, 0xc4, 0xaa, 0x66, 0x0e,
2564	0xb1, 0xf1, 0x1f, 0xdb, 0x11, 0x9f, 0x82, 0x58, 0xc3, 0x06, 0x7e, 0x39, 0xa8, 0xa2, 0x8c, 0x50,
2565	0xfe, 0x79, 0x96, 0x15, 0x2e, 0xbf, 0x8c, 0x1b, 0x98, 0xa0, 0xdf, 0xc4, 0x40, 0x08, 0xab, 0x10,
2566	0x2a, 0xad, 0x3a, 0x93, 0x97, 0xea, 0x55, 0x21, 0xc2, 0x32, 0x4b, 0xf5, 0x9f, 0x7e, 0xf9, 0xd7,
2567	0x5f, 0xc4, 0xef, 0x49, 0x77, 0xc3, 0xdf, 0x8c, 0xfd, 0xfc, 0x9d, 0xd2, 0xd3, 0xd3, 0xb9, 0x3d,
2568	0x2b, 0xf1, 0xd4, 0x9d, 0xd2, 0x53, 0xfe, 0xf0, 0x8c, 0xfd, 0xb4, 0x7c, 0x87, 0x1f, 0x72, 0x77,
2569	0x62, 0xd7, 0xd1, 0xaf, 0x62, 0x90, 0xe6, 0x05, 0x09, 0xad, 0x3a, 0x1b, 0x16, 0xea, 0x56, 0xa4,
2570	0x24, 0x65, 0x96, 0xe4, 0x87, 0xe8, 0x83, 0x97, 0x49, 0xb2, 0xf4, 0x94, 0x2f, 0xf6, 0x33, 0xf4,
2571	0x45, 0x0c, 0xb2, 0x01, 0x33, 0x51, 0xf1, 0x62, 0xc5, 0xa1, 0x50, 0x8a, 0x6c, 0xcf, 0x29, 0x2f,
2572	0x7d, 0x8f, 0x25, 0x7d, 0x0b, 0xdd, 0x7c, 0x99, 0xa4, 0xd1, 0x6f, 0x63, 0x20, 0x84, 0xa5, 0x61,
2573	0x25, 0xf4, 0xcb, 0x45, 0x24, 0xd2, 0xaa, 0xee, 0xb3, 0x04, 0x6b, 0xe5, 0xcb, 0xad, 0xea, 0x1d,
2574	0x56, 0x30, 0x7e, 0x17, 0x03, 0x21, 0xdc, 0x80, 0x2b, 0xd3, 0x5d, 0xde, 0xaa, 0x91, 0xd2, 0x3d,
2575	0x60, 0xe9, 0x36, 0xa5, 0xda, 0xe5, 0xd2, 0x1d, 0xb2, 0xb1, 0x29, 0x65, 0x7f, 0x19, 0x03, 0x21,
2576	0xdc, 0xd2, 0x2b, 0x73, 0x5e, 0xde, 0xfc, 0x85, 0xd7, 0xcf, 0x94, 0x43, 0x79, 0x6a, 0xbb, 0xb3,
2577	0x80, 0xac, 0xd7, 0x2f, 0xb7, 0xac, 0x7b, 0x3f, 0x81, 0xb7, 0x87, 0xd6, 0xf4, 0xc5, 0x49, 0xed,
2578	0x09, 0x94, 0x71, 0x1d, 0x3a, 0x7e, 0x27, 0xf6, 0x89, 0xec, 0xdb, 0x8e, 0x2d, 0x43, 0x33, 0xc7,
2579	0x45, 0x8b, 0x8c, 0x4b, 0x63, 0x6c, 0xb2, 0xec, 0x4a, 0x5c, 0xa5, 0xd9, 0xba, 0xf3, 0x9c, 0x7f,
2580	0x15, 0xb9, 0x1b, 0x08, 0x06, 0x69, 0xe6, 0xf1, 0x9d, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0xdf,
2581	0xbb, 0x5c, 0x5b, 0x5b, 0x22, 0x00, 0x00,
2582}
2583
2584// Reference imports to suppress errors if they are not otherwise used.
2585var _ context.Context
2586var _ grpc.ClientConn
2587
2588// This is a compile-time assertion to ensure that this generated file
2589// is compatible with the grpc package it is being compiled against.
2590const _ = grpc.SupportPackageIsVersion4
2591
2592// JobControllerClient is the client API for JobController service.
2593//
2594// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
2595type JobControllerClient interface {
2596	// Submits a job to a cluster.
2597	SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error)
2598	// Gets the resource representation for a job in a project.
2599	GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error)
2600	// Lists regions/{region}/jobs in a project.
2601	ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error)
2602	// Updates a job in a project.
2603	UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error)
2604	// Starts a job cancellation request. To access the job resource
2605	// after cancellation, call
2606	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list)
2607	// or
2608	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get).
2609	CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error)
2610	// Deletes the job from the project. If the job is active, the delete fails,
2611	// and the response returns `FAILED_PRECONDITION`.
2612	DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error)
2613}
2614
2615type jobControllerClient struct {
2616	cc *grpc.ClientConn
2617}
2618
2619func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient {
2620	return &jobControllerClient{cc}
2621}
2622
2623func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) {
2624	out := new(Job)
2625	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/SubmitJob", in, out, opts...)
2626	if err != nil {
2627		return nil, err
2628	}
2629	return out, nil
2630}
2631
2632func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) {
2633	out := new(Job)
2634	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/GetJob", in, out, opts...)
2635	if err != nil {
2636		return nil, err
2637	}
2638	return out, nil
2639}
2640
2641func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) {
2642	out := new(ListJobsResponse)
2643	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/ListJobs", in, out, opts...)
2644	if err != nil {
2645		return nil, err
2646	}
2647	return out, nil
2648}
2649
2650func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) {
2651	out := new(Job)
2652	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/UpdateJob", in, out, opts...)
2653	if err != nil {
2654		return nil, err
2655	}
2656	return out, nil
2657}
2658
2659func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) {
2660	out := new(Job)
2661	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/CancelJob", in, out, opts...)
2662	if err != nil {
2663		return nil, err
2664	}
2665	return out, nil
2666}
2667
2668func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
2669	out := new(empty.Empty)
2670	err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", in, out, opts...)
2671	if err != nil {
2672		return nil, err
2673	}
2674	return out, nil
2675}
2676
2677// JobControllerServer is the server API for JobController service.
2678type JobControllerServer interface {
2679	// Submits a job to a cluster.
2680	SubmitJob(context.Context, *SubmitJobRequest) (*Job, error)
2681	// Gets the resource representation for a job in a project.
2682	GetJob(context.Context, *GetJobRequest) (*Job, error)
2683	// Lists regions/{region}/jobs in a project.
2684	ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error)
2685	// Updates a job in a project.
2686	UpdateJob(context.Context, *UpdateJobRequest) (*Job, error)
2687	// Starts a job cancellation request. To access the job resource
2688	// after cancellation, call
2689	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list)
2690	// or
2691	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get).
2692	CancelJob(context.Context, *CancelJobRequest) (*Job, error)
2693	// Deletes the job from the project. If the job is active, the delete fails,
2694	// and the response returns `FAILED_PRECONDITION`.
2695	DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error)
2696}
2697
2698func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) {
2699	s.RegisterService(&_JobController_serviceDesc, srv)
2700}
2701
2702func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2703	in := new(SubmitJobRequest)
2704	if err := dec(in); err != nil {
2705		return nil, err
2706	}
2707	if interceptor == nil {
2708		return srv.(JobControllerServer).SubmitJob(ctx, in)
2709	}
2710	info := &grpc.UnaryServerInfo{
2711		Server:     srv,
2712		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/SubmitJob",
2713	}
2714	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2715		return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest))
2716	}
2717	return interceptor(ctx, in, info, handler)
2718}
2719
2720func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2721	in := new(GetJobRequest)
2722	if err := dec(in); err != nil {
2723		return nil, err
2724	}
2725	if interceptor == nil {
2726		return srv.(JobControllerServer).GetJob(ctx, in)
2727	}
2728	info := &grpc.UnaryServerInfo{
2729		Server:     srv,
2730		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/GetJob",
2731	}
2732	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2733		return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest))
2734	}
2735	return interceptor(ctx, in, info, handler)
2736}
2737
2738func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2739	in := new(ListJobsRequest)
2740	if err := dec(in); err != nil {
2741		return nil, err
2742	}
2743	if interceptor == nil {
2744		return srv.(JobControllerServer).ListJobs(ctx, in)
2745	}
2746	info := &grpc.UnaryServerInfo{
2747		Server:     srv,
2748		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/ListJobs",
2749	}
2750	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2751		return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest))
2752	}
2753	return interceptor(ctx, in, info, handler)
2754}
2755
2756func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2757	in := new(UpdateJobRequest)
2758	if err := dec(in); err != nil {
2759		return nil, err
2760	}
2761	if interceptor == nil {
2762		return srv.(JobControllerServer).UpdateJob(ctx, in)
2763	}
2764	info := &grpc.UnaryServerInfo{
2765		Server:     srv,
2766		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/UpdateJob",
2767	}
2768	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2769		return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest))
2770	}
2771	return interceptor(ctx, in, info, handler)
2772}
2773
2774func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2775	in := new(CancelJobRequest)
2776	if err := dec(in); err != nil {
2777		return nil, err
2778	}
2779	if interceptor == nil {
2780		return srv.(JobControllerServer).CancelJob(ctx, in)
2781	}
2782	info := &grpc.UnaryServerInfo{
2783		Server:     srv,
2784		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/CancelJob",
2785	}
2786	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2787		return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest))
2788	}
2789	return interceptor(ctx, in, info, handler)
2790}
2791
2792func _JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
2793	in := new(DeleteJobRequest)
2794	if err := dec(in); err != nil {
2795		return nil, err
2796	}
2797	if interceptor == nil {
2798		return srv.(JobControllerServer).DeleteJob(ctx, in)
2799	}
2800	info := &grpc.UnaryServerInfo{
2801		Server:     srv,
2802		FullMethod: "/google.cloud.dataproc.v1beta2.JobController/DeleteJob",
2803	}
2804	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
2805		return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest))
2806	}
2807	return interceptor(ctx, in, info, handler)
2808}
2809
2810var _JobController_serviceDesc = grpc.ServiceDesc{
2811	ServiceName: "google.cloud.dataproc.v1beta2.JobController",
2812	HandlerType: (*JobControllerServer)(nil),
2813	Methods: []grpc.MethodDesc{
2814		{
2815			MethodName: "SubmitJob",
2816			Handler:    _JobController_SubmitJob_Handler,
2817		},
2818		{
2819			MethodName: "GetJob",
2820			Handler:    _JobController_GetJob_Handler,
2821		},
2822		{
2823			MethodName: "ListJobs",
2824			Handler:    _JobController_ListJobs_Handler,
2825		},
2826		{
2827			MethodName: "UpdateJob",
2828			Handler:    _JobController_UpdateJob_Handler,
2829		},
2830		{
2831			MethodName: "CancelJob",
2832			Handler:    _JobController_CancelJob_Handler,
2833		},
2834		{
2835			MethodName: "DeleteJob",
2836			Handler:    _JobController_DeleteJob_Handler,
2837		},
2838	},
2839	Streams:  []grpc.StreamDesc{},
2840	Metadata: "google/cloud/dataproc/v1beta2/jobs.proto",
2841}
2842